HDDS-447. Separate ozone-dist and hadoop-dist projects with real classpath separation. Contributed by Elek Marton.

Bharat Viswanadham 2018-09-24 10:10:11 -07:00
parent d060cbea48
commit 62f817d32e
74 changed files with 503 additions and 475 deletions

View File

@ -1,181 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# project.version
VERSION=$1
# project.build.directory
BASEDIR=$2
#hdds.version
HDDS_VERSION=$3
function run()
{
declare res
echo "\$ ${*}"
"${@}"
res=$?
if [[ ${res} != 0 ]]; then
echo
echo "Failed!"
echo
exit "${res}"
fi
}
function findfileindir()
{
declare file="$1"
declare dir="${2:-./share}"
declare count
count=$(find "${dir}" -iname "${file}" | wc -l)
#shellcheck disable=SC2086
echo ${count}
}
function copyifnotexists()
{
declare src="$1"
declare dest="$2"
declare srcname
declare destdir
declare child
declare childpath
if [[ -f "${src}" ]]; then
srcname=${src##*/}
if [[ "${srcname}" != *.jar ||
$(findfileindir "${srcname}") -eq "0" ]]; then
destdir=$(dirname "${dest}")
mkdir -p "${destdir}"
cp -p "${src}" "${dest}"
fi
else
for childpath in "${src}"/*; do
child="${childpath##*/}"
if [[ "${child}" == "doc" ||
"${child}" == "webapps" ]]; then
mkdir -p "${dest}/${child}"
cp -r "${src}/${child}"/* "${dest}/${child}"
continue;
fi
copyifnotexists "${src}/${child}" "${dest}/${child}"
done
fi
}
#Copy all contents as-is, except the libs:
#a lib jar is copied only if it is not already present in the 'share' directory.
function copy()
{
declare src="$1"
declare dest="$2"
declare child
declare childpath
if [[ -d "${src}" ]]; then
for childpath in "${src}"/*; do
child="${childpath##*/}"
if [[ "${child}" == "share" ]]; then
copyifnotexists "${src}/${child}" "${dest}/${child}"
else
if [[ -d "${src}/${child}" ]]; then
mkdir -p "${dest}/${child}"
cp -pr "${src}/${child}"/* "${dest}/${child}"
else
cp -pr "${src}/${child}" "${dest}/${child}"
fi
fi
done
fi
}
# shellcheck disable=SC2164
ROOT=$(cd "${BASEDIR}"/../..;pwd)
echo
echo "Current directory $(pwd)"
echo
run rm -rf "ozone-${HDDS_VERSION}"
run mkdir "ozone-${HDDS_VERSION}"
run cd "ozone-${HDDS_VERSION}"
run cp -p "${ROOT}/LICENSE.txt" .
run cp -p "${ROOT}/NOTICE.txt" .
# Copy hadoop-common first so that it always has all of its dependencies.
# The remaining projects copy only the libraries that are not already present in the 'share' directory.
run copy "${ROOT}/hadoop-common-project/hadoop-common/target/hadoop-common-${VERSION}" .
# HDDS
run copy "${ROOT}/hadoop-hdds/common/target/hadoop-hdds-common-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/framework/target/hadoop-hdds-server-framework-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/server-scm/target/hadoop-hdds-server-scm-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/container-service/target/hadoop-hdds-container-service-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/client/target/hadoop-hdds-client-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/tools/target/hadoop-hdds-tools-${HDDS_VERSION}" .
# Ozone
run copy "${ROOT}/hadoop-ozone/common/target/hadoop-ozone-common-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/ozone-manager/target/hadoop-ozone-ozone-manager-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/objectstore-service/target/hadoop-ozone-objectstore-service-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/client/target/hadoop-ozone-client-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/tools/target/hadoop-ozone-tools-${HDDS_VERSION}" .
#shaded ozonefs
mkdir -p "./share/hadoop/ozonefs"
cp "${ROOT}/hadoop-ozone/ozonefs/target/hadoop-ozone-filesystem-${HDDS_VERSION}.jar" "./share/hadoop/ozonefs/hadoop-ozone-filesystem-${HDDS_VERSION}.jar"
#shaded datanode service
mkdir -p "./share/hadoop/ozoneplugin"
cp "${ROOT}/hadoop-ozone/objectstore-service/target/hadoop-ozone-objectstore-service-${HDDS_VERSION}-plugin.jar" "./share/hadoop/ozoneplugin/hadoop-ozone-datanode-plugin-${HDDS_VERSION}.jar"
# Optional documentation, could be missing
cp -r "${ROOT}/hadoop-ozone/docs/target/classes/webapps/docs" ./share/hadoop/ozone/webapps/ozoneManager/
cp -r "${ROOT}/hadoop-ozone/docs/target/classes/webapps/docs" ./share/hadoop/hdds/webapps/scm/
cp -r "${ROOT}/hadoop-ozone/docs/target/classes/webapps/docs" ./
rm sbin/*all.sh
rm sbin/*all.cmd
#remove test and java sources
find . -name "*tests.jar" | xargs rm
find . -name "*sources.jar" | xargs rm
find . -name jdiff -type d | xargs rm -rf
#add ozone specific readme
run cp "${ROOT}/hadoop-dist/src/main/ozone/README.txt" README.txt
#Copy docker compose files and robot tests
run cp -p -r "${ROOT}/hadoop-dist/src/main/compose" .
run cp -p -r "${ROOT}/hadoop-dist/src/main/smoketest" .
mkdir -p ./share/hadoop/mapreduce
mkdir -p ./share/hadoop/yarn
mkdir -p ./share/hadoop/hdfs
echo
echo "Hadoop Ozone dist layout available at: ${BASEDIR}/ozone-${HDDS_VERSION}"
echo

View File

@ -176,7 +176,6 @@
</argument>
<argument>${project.version}</argument>
<argument>${project.build.directory}</argument>
<argument>${hdds.version}</argument>
</arguments>
</configuration>
</execution>
@ -224,123 +223,6 @@
</plugins>
</build>
</profile>
<profile>
<id>hdds</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-ozone-manager</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-server-scm</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-tools</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-container-service</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-objectstore-service</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-tools</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-docs</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-filesystem</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>copy-dockerfile</id>
<goals>
<goal>copy-resources</goal>
</goals>
<phase>prepare-package</phase>
<configuration>
<outputDirectory>${project.build.directory}</outputDirectory>
<resources>
<resource>
<directory>src/main/docker</directory>
<filtering>true</filtering>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<id>dist-ozone</id>
<phase>prepare-package</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.directory}
</workingDirectory>
<arguments>
<argument>
${basedir}/../dev-support/bin/ozone-dist-layout-stitching
</argument>
<argument>${project.version}</argument>
<argument>${project.build.directory}</argument>
<argument>${hdds.version}</argument>
</arguments>
</configuration>
</execution>
<execution>
<id>tar-ozone</id>
<phase>package</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.directory}
</workingDirectory>
<arguments>
<argument>${basedir}/../dev-support/bin/ozone-dist-tar-stitching
</argument>
<argument>${hdds.version}</argument>
<argument>${project.build.directory}</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@ -29,11 +29,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop HDDS Client</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>hdds</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>

View File

@ -30,8 +30,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<properties>
<hdds.version>0.3.0-SNAPSHOT</hdds.version>
<hadoop.component>hdds</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
<log4j2.version>2.11.0</log4j2.version>
<disruptor.version>3.4.2</disruptor.version>
<declared.hdds.version>${hdds.version}</declared.hdds.version>

View File

@ -28,11 +28,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop HDDS Container Service</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>hdds</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>

View File

@ -28,11 +28,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop HDDS Server Framework</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>hdds</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>

View File

@ -119,6 +119,36 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<excludeFilterFile combine.self="override"></excludeFilterFile>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>add-classpath-descriptor</id>
<phase>package</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<attach>true</attach>
<prefix>$HDDS_LIB_JARS_DIR</prefix>
<outputFilterFile>true</outputFilterFile>
<includeScope>runtime</includeScope>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>

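The build-classpath execution above writes, for every HDDS module, a classpath descriptor in Maven "filter file" style and attaches it to the build with the classpath classifier; each entry is prefixed with the literal $HDDS_LIB_JARS_DIR instead of a local-repository path. A minimal sketch of what such a descriptor could contain (jar names are invented for illustration, not taken from a real build):
# hadoop-hdds-server-scm.classpath (illustrative content only)
classpath=$HDDS_LIB_JARS_DIR/hadoop-hdds-common-0.3.0-SNAPSHOT.jar:$HDDS_LIB_JARS_DIR/hadoop-hdds-server-framework-0.3.0-SNAPSHOT.jar
The bin/ozone changes later in this commit export HDDS_LIB_JARS_DIR and source this file, so the prefix expands to the real lib directory at run time.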
View File

@ -28,11 +28,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop HDDS SCM Server</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>hdds</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
@ -104,28 +99,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>copy web resources</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<copy toDir="${project.build.directory}/webapps">
<fileset dir="${basedir}/src/main/webapps">
<exclude name="**/proto-web.xml"/>
</fileset>
</copy>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
@ -136,19 +109,20 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<goals>
<goal>unpack</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-server-framework</artifactId>
<outputDirectory>${project.build.outputDirectory}
</outputDirectory>
<includes>webapps/static/**/*.*</includes>
</artifactItem>
</artifactItems>
<overWriteSnapshots>true</overWriteSnapshots>
</configuration>
</execution>
</executions>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-server-framework</artifactId>
<outputDirectory>${project.build.directory}/</outputDirectory>
<includes>webapps/static/**/*.*</includes>
</artifactItem>
</artifactItems>
<overWriteSnapshots>true</overWriteSnapshots>
</configuration>
</plugin>
</plugins>
</build>

View File

@ -29,26 +29,18 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop HDDS Tools</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>hdds</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-cli</groupId>

View File

@ -28,16 +28,10 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop Ozone Client</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>ozone</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-common</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>

View File

@ -28,11 +28,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop Ozone Common</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>ozone</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
</dependencies>

View File

@ -68,6 +68,7 @@ function ozonecmd_case
datanode)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME=org.apache.hadoop.ozone.HddsDatanodeService
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-datanode"
;;
envvars)
echo "JAVA_HOME='${JAVA_HOME}'"
@ -86,42 +87,50 @@ function ozonecmd_case
;;
freon)
HADOOP_CLASSNAME=org.apache.hadoop.ozone.freon.Freon
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-tools"
;;
genesis)
HADOOP_CLASSNAME=org.apache.hadoop.ozone.genesis.Genesis
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-tools"
;;
getozoneconf)
HADOOP_CLASSNAME=org.apache.hadoop.ozone.freon.OzoneGetConf;
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-tools"
;;
om)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME=org.apache.hadoop.ozone.om.OzoneManager
HDFS_OM_OPTS="${HDFS_OM_OPTS} -Dlog4j.configurationFile=${HADOOP_CONF_DIR}/om-audit-log4j2.properties"
HADOOP_OPTS="${HADOOP_OPTS} ${HDFS_OM_OPTS}"
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-ozone-manager"
;;
sh | shell)
HADOOP_CLASSNAME=org.apache.hadoop.ozone.web.ozShell.Shell
;;
noz)
HADOOP_CLASSNAME=org.apache.hadoop.ozone.scm.cli.SQLCLI
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-ozone-manager"
;;
scm)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME='org.apache.hadoop.hdds.scm.server.StorageContainerManager'
hadoop_debug "Appending HDFS_STORAGECONTAINERMANAGER_OPTS onto HADOOP_OPTS"
HADOOP_OPTS="${HADOOP_OPTS} ${HDFS_STORAGECONTAINERMANAGER_OPTS}"
OZONE_RUN_ARTIFACT_NAME="hadoop-hdds-server-scm"
;;
fs)
HADOOP_CLASSNAME=org.apache.hadoop.fs.FsShell
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-tools"
;;
scmcli)
HADOOP_CLASSNAME=org.apache.hadoop.hdds.scm.cli.SCMCLI
OZONE_RUN_ARTIFACT_NAME="hadoop-hdds-tools"
;;
version)
HADOOP_CLASSNAME=org.apache.hadoop.ozone.util.OzoneVersionInfo
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-common"
;;
genconf)
HADOOP_CLASSNAME=org.apache.hadoop.ozone.genconf.GenerateOzoneRequiredConfigurations
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-tools"
;;
*)
HADOOP_CLASSNAME="${subcmd}"
@ -161,6 +170,7 @@ fi
HADOOP_SUBCMD=$1
shift
if hadoop_need_reexec ozone "${HADOOP_SUBCMD}"; then
hadoop_uservar_su ozone "${HADOOP_SUBCMD}" \
"${MYNAME}" \
@ -180,6 +190,40 @@ else
ozonecmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
fi
#
# Set up the classpath based on the generated classpath descriptors
#
if [ ! "$OZONE_RUN_ARTIFACT_NAME" ]; then
echo "ERROR: Ozone components require to set OZONE_RUN_ARTIFACT_NAME to set the classpath"
exit -1
fi
export HDDS_LIB_JARS_DIR="${HADOOP_HDFS_HOME}/share/ozone/lib"
CLASSPATH_FILE="${HADOOP_HDFS_HOME}/share/ozone/classpath/${OZONE_RUN_ARTIFACT_NAME}.classpath"
if [ ! "$CLASSPATH_FILE" ]; then
echo "ERROR: Classpath file descriptor $CLASSPATH_FILE is missing"
exit -1
fi
# shellcheck disable=SC1090,SC2086
source $CLASSPATH_FILE
OIFS=$IFS
IFS=':'
# shellcheck disable=SC2154
for jar in $classpath; do
hadoop_add_classpath "$jar"
done
hadoop_add_classpath "${HADOOP_HDFS_HOME}/share/ozone/web"
#We need to add the artifact itself manually as it's not part of the generated classpath descriptor
ARTIFACT_LIB_DIR="${HADOOP_HDFS_HOME}/share/ozone/lib"
MAIN_ARTIFACT=$(find "$ARTIFACT_LIB_DIR" -name "${OZONE_RUN_ARTIFACT_NAME}-*.jar")
if [ ! "$MAIN_ARTIFACT" ]; then
echo "ERROR: Component jar file $MAIN_ARTIFACT is missing from ${HADOOP_HDFS_HOME}/share/ozone/lib"
fi
hadoop_add_classpath "${MAIN_ARTIFACT}"
IFS=$OIFS
hadoop_add_client_opts
if [[ ${HADOOP_WORKER_MODE} = true ]]; then

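A self-contained sketch of the source-time expansion the block above relies on: HDDS_LIB_JARS_DIR is exported first, so the unquoted assignment inside the descriptor expands to absolute jar paths when the file is sourced (paths and jar names below are made up for the demo):
export HDDS_LIB_JARS_DIR=/tmp/ozone-demo/lib
# a descriptor as produced by build-classpath with outputFilterFile=true and
# prefix=$HDDS_LIB_JARS_DIR (content is illustrative)
printf 'classpath=$HDDS_LIB_JARS_DIR/a.jar:$HDDS_LIB_JARS_DIR/b.jar\n' > /tmp/demo.classpath
# shellcheck disable=SC1091
source /tmp/demo.classpath
echo "$classpath" | tr ':' '\n'   # -> /tmp/ozone-demo/lib/a.jar and /tmp/ozone-demo/lib/b.jar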
View File

@ -19,27 +19,3 @@ if [[ "${HADOOP_SHELL_EXECNAME}" = ozone ]]; then
fi
## @description Profile for hdds/ozone components.
## @audience private
## @stability evolving
function _ozone_hadoop_classpath
{
#
# get all of the ozone jars+config in the path
#
if [[ -d "${HADOOP_HDFS_HOME}/${HDDS_DIR}/webapps" ]]; then
hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDDS_DIR}"
fi
if [[ -d "${HADOOP_HDFS_HOME}/${HDDS_DIR}/webapps" ]]; then
hadoop_add_classpath "${HADOOP_HDFS_HOME}/${OZONE_DIR}"
fi
hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDDS_LIB_JARS_DIR}"'/*'
hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDDS_DIR}"'/*'
hadoop_add_classpath "${HADOOP_HDFS_HOME}/${OZONE_LIB_JARS_DIR}"'/*'
hadoop_add_classpath "${HADOOP_HDFS_HOME}/${OZONE_DIR}"'/*'
hadoop_add_classpath "${HADOOP_HDFS_HOME}/${OZONEFS_DIR}"'/*'
}

View File

@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone</artifactId>
<version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>hadoop-ozone-datanode</artifactId>
<name>Apache Hadoop Ozone Datanode</name>
<packaging>jar</packaging>
<version>0.3.0-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-container-service</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-objectstore-service</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,113 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# project.build.directory
BASEDIR=$1
#hdds.version
HDDS_VERSION=$2
## @audience private
## @stability evolving
function run()
{
declare res
echo "\$ ${*}"
"${@}"
res=$?
if [[ ${res} != 0 ]]; then
echo
echo "Failed!"
echo
exit "${res}"
fi
}
## @audience private
## @stability evolving
function findfileindir()
{
declare file="$1"
declare dir="${2:-./share}"
declare count
count=$(find "${dir}" -iname "${file}" | wc -l)
#shellcheck disable=SC2086
echo ${count}
}
# shellcheck disable=SC2164
ROOT=$(cd "${BASEDIR}"/../../..;pwd)
echo
echo "Current directory $(pwd)"
echo
run rm -rf "ozone-${HDDS_VERSION}"
run mkdir "ozone-${HDDS_VERSION}"
run cd "ozone-${HDDS_VERSION}"
run cp -p "${ROOT}/LICENSE.txt" .
run cp -p "${ROOT}/NOTICE.txt" .
run cp -p "${ROOT}/README.txt" .
run mkdir -p ./share/hadoop/mapreduce
run mkdir -p ./share/hadoop/ozone
run mkdir -p ./share/hadoop/hdds
run mkdir -p ./share/hadoop/yarn
run mkdir -p ./share/hadoop/hdfs
run mkdir -p ./share/hadoop/common
run mkdir -p ./share/ozone/web
run mkdir -p ./bin
run mkdir -p ./sbin
run mkdir -p ./etc
run mkdir -p ./libexec
run cp -r "${ROOT}/hadoop-common-project/hadoop-common/src/main/conf" "etc/hadoop"
run cp "${ROOT}/hadoop-common-project/hadoop-common/src/main/bin/hadoop" "bin/"
run cp "${ROOT}/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd" "bin/"
run cp "${ROOT}/hadoop-ozone/common/src/main/bin/ozone" "bin/"
run cp "${ROOT}/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh" "libexec/"
run cp "${ROOT}/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd" "libexec/"
run cp "${ROOT}/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh" "libexec/"
run cp "${ROOT}/hadoop-ozone/common/src/main/bin/ozone-config.sh" "libexec/"
run cp -r "${ROOT}/hadoop-ozone/common/src/main/shellprofile.d" "libexec/"
run cp "${ROOT}/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh" "sbin/"
run cp "${ROOT}/hadoop-common-project/hadoop-common/src/main/bin/workers.sh" "sbin/"
run cp "${ROOT}/hadoop-ozone/common/src/main/bin/start-ozone.sh" "sbin/"
run cp "${ROOT}/hadoop-ozone/common/src/main/bin/stop-ozone.sh" "sbin/"
#shaded ozonefs
run mkdir -p "./share/hadoop/ozonefs"
run cp "${ROOT}/hadoop-ozone/ozonefs/target/hadoop-ozone-filesystem-${HDDS_VERSION}.jar" "./share/hadoop/ozonefs/hadoop-ozone-filesystem-${HDDS_VERSION}.jar"
#shaded datanode service
run mkdir -p "./share/hadoop/ozoneplugin"
run cp "${ROOT}/hadoop-ozone/objectstore-service/target/hadoop-ozone-objectstore-service-${HDDS_VERSION}-plugin.jar" "./share/hadoop/ozoneplugin/hadoop-ozone-datanode-plugin-${HDDS_VERSION}.jar"
# Optional documentation, could be missing
cp -r "${ROOT}/hadoop-ozone/docs/target/classes/webapps/docs" ./share/hadoop/ozone/webapps/ozoneManager/
cp -r "${ROOT}/hadoop-ozone/docs/target/classes/webapps/docs" ./share/hadoop/hdds/webapps/scm/
#Copy docker compose files
run cp -p -r "${ROOT}/hadoop-ozone/dist/src/main/compose" .
run cp -p -r "${ROOT}/hadoop-ozone/dist/src/main/smoketest" .

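The new dist-layout-stitching script is driven by the exec-maven-plugin execution in hadoop-ozone/dist/pom.xml (shown below), which passes ${project.build.directory} and ${hdds.version} and runs it from the target directory. A roughly equivalent manual invocation, assuming all Ozone modules have already been built and using the 0.3.0-SNAPSHOT version from the poms in this commit:
cd hadoop-ozone/dist/target
bash ../dev-support/bin/dist-layout-stitching "$(pwd)" 0.3.0-SNAPSHOT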
View File

@ -21,6 +21,8 @@ VERSION=$1
# project.build.directory
BASEDIR=$2
## @audience private
## @stability evolving
function run()
{
declare res

hadoop-ozone/dist/pom.xml (new file)
View File

@ -0,0 +1,184 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone</artifactId>
<version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>hadoop-ozone-dist</artifactId>
<name>Apache Hadoop Ozone Distribution</name>
<packaging>pom</packaging>
<version>0.3.0-SNAPSHOT</version>
<properties>
<file.encoding>UTF-8</file.encoding>
<downloadSources>true</downloadSources>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>copy-classpath-files</id>
<phase>package</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<outputDirectory>target/ozone-${ozone.version}/share/ozone/classpath
</outputDirectory>
<artifactItems>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-server-scm</artifactId>
<version>${hdds.version}</version>
<classifier>classpath</classifier>
<destFileName>hadoop-hdds-server-scm.classpath</destFileName>
</artifactItem>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-tools</artifactId>
<version>${hdds.version}</version>
<classifier>classpath</classifier>
<destFileName>hadoop-hdds-tools.classpath</destFileName>
</artifactItem>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-ozone-manager</artifactId>
<version>${ozone.version}</version>
<classifier>classpath</classifier>
<destFileName>hadoop-ozone-ozone-manager.classpath
</destFileName>
</artifactItem>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-tools</artifactId>
<version>${ozone.version}</version>
<classifier>classpath</classifier>
<destFileName>hadoop-ozone-tools.classpath</destFileName>
</artifactItem>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-common</artifactId>
<version>${ozone.version}</version>
<classifier>classpath</classifier>
<destFileName>hadoop-ozone-common.classpath</destFileName>
</artifactItem>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-datanode</artifactId>
<version>${ozone.version}</version>
<classifier>classpath</classifier>
<destFileName>hadoop-ozone-datanode.classpath</destFileName>
</artifactItem>
</artifactItems>
</configuration>
</execution>
<execution>
<id>copy-jars</id>
<phase>package</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>target/ozone-${ozone.version}/share/ozone/lib
</outputDirectory>
<includeScope>runtime</includeScope>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<id>dist</id>
<phase>prepare-package</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.directory}</workingDirectory>
<arguments>
<argument>
${basedir}/dev-support/bin/dist-layout-stitching
</argument>
<argument>${project.build.directory}</argument>
<argument>${hdds.version}</argument>
</arguments>
</configuration>
</execution>
<execution>
<id>tar-ozone</id>
<phase>package</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.directory}
</workingDirectory>
<arguments>
<argument>${basedir}/dev-support/bin/dist-tar-stitching
</argument>
<argument>${hdds.version}</argument>
<argument>${project.build.directory}</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-tools</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-server-scm</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-container-service</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-ozone-manager</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-tools</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-datanode</artifactId>
</dependency>
</dependencies>
</project>

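Taken together with the stitching script, the copy-classpath-files and copy-jars executions above produce a layout roughly like the following; this is a sketch inferred from the pom and the script, not a captured listing:
# under hadoop-ozone/dist/target/ozone-0.3.0-SNAPSHOT/ (version assumed)
#   bin/ozone, bin/hadoop            launcher scripts
#   etc/hadoop/, libexec/, sbin/     configuration and shell libraries
#   share/ozone/lib/                 every runtime jar, copied once (copy-dependencies)
#   share/ozone/classpath/           one <artifact>.classpath descriptor per component
#   share/hadoop/ozonefs/            shaded ozone filesystem jar
#   share/hadoop/ozoneplugin/        shaded datanode plugin jar
ls target/ozone-*/share/ozone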
View File

@ -61,4 +61,4 @@ services:
- "./prometheus.yml:/etc/prometheus.yml"
command: ["--config.file","/etc/prometheus.yml"]
ports:
- 9090:9090
- 9090:9090

View File

@ -33,4 +33,3 @@ HADOOP_OPTS=-javaagent:/opt/jmxpromo.jar=port=0:consulHost=consul:consulMode=nod
LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
LOG4J.PROPERTIES_log4j.logger.org.apache.ratis.conf.ConfUtils=WARN
LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.security.ShellBasedUnixGroupsMapping=ERROR

View File

@ -28,11 +28,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop Ozone Documentation</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>ozone</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
</dependencies>

View File

@ -28,37 +28,23 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop Ozone Integration Tests</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>ozone</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-ozone-manager</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-objectstore-service</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-tools</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>

View File

@ -28,11 +28,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop Ozone Object Store REST Service</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>ozone</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>

View File

@ -28,11 +28,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop Ozone Manager Server</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>ozone</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
@ -61,28 +56,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>copy web resources</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<copy toDir="${project.build.directory}/webapps">
<fileset dir="${basedir}/src/main/webapps">
<exclude name="**/proto-web.xml"/>
</fileset>
</copy>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
@ -93,19 +66,20 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<goals>
<goal>unpack</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-server-framework</artifactId>
<outputDirectory>${project.build.outputDirectory}
</outputDirectory>
<includes>webapps/static/**/*.*</includes>
</artifactItem>
</artifactItems>
<overWriteSnapshots>true</overWriteSnapshots>
</configuration>
</execution>
</executions>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdds-server-framework</artifactId>
<outputDirectory>${project.build.directory}/</outputDirectory>
<includes>webapps/static/**/*.*</includes>
</artifactItem>
</artifactItems>
<overWriteSnapshots>true</overWriteSnapshots>
</configuration>
</plugin>
</plugins>
</build>

View File

@ -28,8 +28,6 @@
<properties>
<file.encoding>UTF-8</file.encoding>
<downloadSources>true</downloadSources>
<hadoop.component>ozone</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<build>

View File

@ -19,9 +19,9 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project-dist</artifactId>
<artifactId>hadoop-project</artifactId>
<version>3.2.0-SNAPSHOT</version>
<relativePath>../hadoop-project-dist</relativePath>
<relativePath>../hadoop-project</relativePath>
</parent>
<artifactId>hadoop-ozone</artifactId>
<version>0.3.0-SNAPSHOT</version>
@ -42,6 +42,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<module>ozonefs</module>
<module>integration-test</module>
<module>objectstore-service</module>
<module>datanode</module>
<module>dist</module>
<module>docs</module>
</modules>
@ -161,6 +163,25 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<excludeFilterFile combine.self="override"></excludeFilterFile>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>add-classpath-descriptor</id>
<phase>package</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<attach>true</attach>
<prefix>$HDDS_LIB_JARS_DIR</prefix>
<outputFilterFile>true</outputFilterFile>
<includeScope>runtime</includeScope>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@ -28,21 +28,33 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>Apache Hadoop Ozone Tools</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>ozone</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-filesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-client</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
@ -59,6 +71,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<artifactId>jmh-generator-annprocess</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-integration-test</artifactId>
<scope>test</scope>
<type>test-jar</type>
</dependency>
</dependencies>
<build>
<plugins>

View File

@ -616,6 +616,11 @@
<artifactId>hadoop-ozone-tools</artifactId>
<version>${hdds.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-datanode</artifactId>
<version>${hdds.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-integration-test</artifactId>