HDDS-40. Separating packaging of Ozone/HDDS from the main Hadoop.
Contributed by Elek, Marton.

parent 50408cfc69
commit 4b4f24ad5f
.gitignore (vendored, 5 lines added)
@@ -48,3 +48,8 @@ patchprocess/
 .history/
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/package-lock.json
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/yarn-error.log
+
+#robotframework outputs
+log.html
+output.xml
+report.html
@@ -146,21 +146,6 @@ run cp -p "${ROOT}/hadoop-client-modules/hadoop-client-api/target/hadoop-client-
 run cp -p "${ROOT}/hadoop-client-modules/hadoop-client-runtime/target/hadoop-client-runtime-${VERSION}.jar" share/hadoop/client/
 run cp -p "${ROOT}/hadoop-client-modules/hadoop-client-minicluster/target/hadoop-client-minicluster-${VERSION}.jar" share/hadoop/client/
-
-# HDDS
-run copy "${ROOT}/hadoop-hdds/common/target/hadoop-hdds-common-${HDDS_VERSION}" .
-run copy "${ROOT}/hadoop-hdds/framework/target/hadoop-hdds-server-framework-${HDDS_VERSION}" .
-run copy "${ROOT}/hadoop-hdds/server-scm/target/hadoop-hdds-server-scm-${HDDS_VERSION}" .
-run copy "${ROOT}/hadoop-hdds/container-service/target/hadoop-hdds-container-service-${HDDS_VERSION}" .
-run copy "${ROOT}/hadoop-hdds/client/target/hadoop-hdds-client-${HDDS_VERSION}" .
-run copy "${ROOT}/hadoop-hdds/tools/target/hadoop-hdds-tools-${HDDS_VERSION}" .
-
-# Ozone
-run copy "${ROOT}/hadoop-ozone/common/target/hadoop-ozone-common-${HDDS_VERSION}" .
-run copy "${ROOT}/hadoop-ozone/ozone-manager/target/hadoop-ozone-ozone-manager-${HDDS_VERSION}" .
-run copy "${ROOT}/hadoop-ozone/objectstore-service/target/hadoop-ozone-objectstore-service-${HDDS_VERSION}" .
-run copy "${ROOT}/hadoop-ozone/client/target/hadoop-ozone-client-${HDDS_VERSION}" .
-run copy "${ROOT}/hadoop-ozone/tools/target/hadoop-ozone-tools-${HDDS_VERSION}" .
 run copy "${ROOT}/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${VERSION}" .
dev-support/bin/ozone-dist-layout-stitching (new executable file, 153 lines)
@@ -0,0 +1,153 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# project.version
VERSION=$1

# project.build.directory
BASEDIR=$2

#hdds.version
HDDS_VERSION=$3

function run()
{
  declare res

  echo "\$ ${*}"
  "${@}"
  res=$?
  if [[ ${res} != 0 ]]; then
    echo
    echo "Failed!"
    echo
    exit "${res}"
  fi
}

function findfileindir()
{
  declare file="$1"
  declare dir="${2:-./share}"
  declare count

  count=$(find "${dir}" -iname "${file}" | wc -l)

  #shellcheck disable=SC2086
  echo ${count}
}

function copyifnotexists()
{
  declare src="$1"
  declare dest="$2"

  declare srcname
  declare destdir

  declare child
  declare childpath

  if [[ -f "${src}" ]]; then
    srcname=${src##*/}
    if [[ "${srcname}" != *.jar ||
          $(findfileindir "${srcname}") -eq "0" ]]; then
      destdir=$(dirname "${dest}")
      mkdir -p "${destdir}"
      cp -p "${src}" "${dest}"
    fi
  else
    for childpath in "${src}"/*; do
      child="${childpath##*/}"
      if [[ "${child}" == "doc" ||
            "${child}" == "webapps" ]]; then
        mkdir -p "${dest}/${child}"
        cp -r "${src}/${child}"/* "${dest}/${child}"
        continue;
      fi
      copyifnotexists "${src}/${child}" "${dest}/${child}"
    done
  fi
}

#Copy all contents as is except the lib.
#for libs check for existence in share directory, if not exist then only copy.
function copy()
{
  declare src="$1"
  declare dest="$2"

  declare child
  declare childpath

  if [[ -d "${src}" ]]; then
    for childpath in "${src}"/*; do
      child="${childpath##*/}"

      if [[ "${child}" == "share" ]]; then
        copyifnotexists "${src}/${child}" "${dest}/${child}"
      else
        if [[ -d "${src}/${child}" ]]; then
          mkdir -p "${dest}/${child}"
          cp -pr "${src}/${child}"/* "${dest}/${child}"
        else
          cp -pr "${src}/${child}" "${dest}/${child}"
        fi
      fi
    done
  fi
}

# shellcheck disable=SC2164
ROOT=$(cd "${BASEDIR}"/../..;pwd)
echo
echo "Current directory $(pwd)"
echo
run rm -rf "ozone"
run mkdir "ozone"
run cd "ozone"
run cp -p "${ROOT}/LICENSE.txt" .
run cp -p "${ROOT}/NOTICE.txt" .
run cp -p "${ROOT}/README.txt" .

# Copy hadoop-common first so that it have always have all dependencies.
# Remaining projects will copy only libraries which are not present already in 'share' directory.
run copy "${ROOT}/hadoop-common-project/hadoop-common/target/hadoop-common-${VERSION}" .
run copy "${ROOT}/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${VERSION}" .
run copy "${ROOT}/hadoop-hdfs-project/hadoop-hdfs-client/target/hadoop-hdfs-client-${VERSION}" .

# HDDS
run copy "${ROOT}/hadoop-hdds/common/target/hadoop-hdds-common-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/framework/target/hadoop-hdds-server-framework-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/server-scm/target/hadoop-hdds-server-scm-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/container-service/target/hadoop-hdds-container-service-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/client/target/hadoop-hdds-client-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/tools/target/hadoop-hdds-tools-${HDDS_VERSION}" .

# Ozone
run copy "${ROOT}/hadoop-ozone/common/target/hadoop-ozone-common-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/ozone-manager/target/hadoop-ozone-ozone-manager-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/objectstore-service/target/hadoop-ozone-objectstore-service-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/client/target/hadoop-ozone-client-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/tools/target/hadoop-ozone-tools-${HDDS_VERSION}" .

mkdir -p ./share/hadoop/mapreduce
mkdir -p ./share/hadoop/yarn
echo
echo "Hadoop Ozone dist layout available at: ${BASEDIR}/ozone-${HDDS_VERSION}"
echo
dev-support/bin/ozone-dist-tar-stitching (new executable file, 48 lines)
@@ -0,0 +1,48 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# project.version
VERSION=$1

# project.build.directory
BASEDIR=$2

function run()
{
  declare res

  echo "\$ ${*}"
  "${@}"
  res=$?
  if [[ ${res} != 0 ]]; then
    echo
    echo "Failed!"
    echo
    exit "${res}"
  fi
}

#To make the final dist directory easily mountable from docker we don't use
#version name in the directory name.
#To include the version name in the root directory of the tar file
# we create a symbolic link and dereference it during the tar creation
ln -s -f ozone ozone-${VERSION}
run tar -c --dereference -f "ozone-${VERSION}.tar" "ozone"
run gzip -f "ozone-${VERSION}.tar"
echo
echo "Ozone dist tar available at: ${BASEDIR}/ozone-${VERSION}.tar.gz"
echo
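
Both stitching scripts are normally driven by Maven through the exec-maven-plugin executions shown in the pom changes below: the layout script receives project.version, the build directory and hdds.version, while the tar script receives a version and the build directory. A manual run would look roughly like the sketch below; the version values are only illustrative, not taken from this commit.

```bash
# Sketch of a manual invocation from the dist build directory,
# assuming a completed build and illustrative version numbers.
cd hadoop-dist/target
../../dev-support/bin/ozone-dist-layout-stitching 3.2.0-SNAPSHOT "$(pwd)" 0.2.1-SNAPSHOT
../../dev-support/bin/ozone-dist-tar-stitching 0.2.1-SNAPSHOT "$(pwd)"
# Expected results: the ozone/ layout directory and an ozone-0.2.1-SNAPSHOT.tar.gz
```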
@@ -13,8 +13,8 @@
    limitations under the License. See accompanying LICENSE file.
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
   http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
@@ -168,10 +168,13 @@
             </goals>
             <configuration>
               <executable>${shell-executable}</executable>
-              <workingDirectory>${project.build.directory}</workingDirectory>
+              <workingDirectory>${project.build.directory}
+              </workingDirectory>
               <requiresOnline>false</requiresOnline>
               <arguments>
-                <argument>${basedir}/../dev-support/bin/dist-layout-stitching</argument>
+                <argument>
+                  ${basedir}/../dev-support/bin/dist-layout-stitching
+                </argument>
                 <argument>${project.version}</argument>
                 <argument>${project.build.directory}</argument>
                 <argument>${hdds.version}</argument>
@@ -182,14 +185,16 @@
             <id>toolshooks</id>
             <phase>prepare-package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
             <configuration>
               <executable>${shell-executable}</executable>
               <workingDirectory>${basedir}</workingDirectory>
               <requiresOnline>false</requiresOnline>
               <arguments>
-                <argument>${basedir}/../dev-support/bin/dist-tools-hooks-maker</argument>
+                <argument>
+                  ${basedir}/../dev-support/bin/dist-tools-hooks-maker
+                </argument>
                 <argument>${project.version}</argument>
                 <argument>${project.build.directory}</argument>
                 <argument>${basedir}/../hadoop-tools</argument>
@@ -203,14 +208,16 @@
               <goal>exec</goal>
             </goals>
             <configuration>
               <executable>${shell-executable}</executable>
-              <workingDirectory>${project.build.directory}</workingDirectory>
+              <workingDirectory>${project.build.directory}
+              </workingDirectory>
               <requiresOnline>false</requiresOnline>
               <arguments>
-                <argument>${basedir}/../dev-support/bin/dist-tar-stitching</argument>
+                <argument>${basedir}/../dev-support/bin/dist-tar-stitching
+                </argument>
                 <argument>${project.version}</argument>
                 <argument>${project.build.directory}</argument>
               </arguments>
             </configuration>
           </execution>
         </executions>
@@ -218,14 +225,12 @@
       </plugins>
     </build>
   </profile>
-
   <profile>
     <id>hdds</id>
     <activation>
       <activeByDefault>false</activeByDefault>
     </activation>
     <dependencies>
-
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-ozone-ozone-manager</artifactId>
@@ -261,41 +266,86 @@
       <plugins>
         <plugin>
           <artifactId>maven-resources-plugin</artifactId>
           <executions>
             <execution>
               <id>copy-docker-compose</id>
               <goals>
                 <goal>copy-resources</goal>
               </goals>
               <phase>prepare-package</phase>
               <configuration>
-                <outputDirectory>${project.build.directory}/compose</outputDirectory>
+                <outputDirectory>${project.build.directory}/compose
+                </outputDirectory>
                 <resources>
                   <resource>
                     <directory>src/main/compose</directory>
                     <filtering>true</filtering>
                   </resource>
                 </resources>
               </configuration>
             </execution>
             <execution>
               <id>copy-dockerfile</id>
               <goals>
                 <goal>copy-resources</goal>
               </goals>
               <phase>prepare-package</phase>
               <configuration>
                 <outputDirectory>${project.build.directory}</outputDirectory>
                 <resources>
                   <resource>
                     <directory>src/main/docker</directory>
                     <filtering>true</filtering>
                   </resource>
                 </resources>
               </configuration>
             </execution>
           </executions>
         </plugin>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>exec-maven-plugin</artifactId>
+          <executions>
+            <execution>
+              <id>dist-ozone</id>
+              <phase>prepare-package</phase>
+              <goals>
+                <goal>exec</goal>
+              </goals>
+              <configuration>
+                <executable>${shell-executable}</executable>
+                <workingDirectory>${project.build.directory}
+                </workingDirectory>
+                <arguments>
+                  <argument>
+                    ${basedir}/../dev-support/bin/ozone-dist-layout-stitching
+                  </argument>
+                  <argument>${project.version}</argument>
+                  <argument>${project.build.directory}</argument>
+                  <argument>${hdds.version}</argument>
+                </arguments>
+              </configuration>
+            </execution>
+            <execution>
+              <id>tar-ozone</id>
+              <phase>package</phase>
+              <goals>
+                <goal>exec</goal>
+              </goals>
+              <configuration>
+                <executable>${shell-executable}</executable>
+                <workingDirectory>${project.build.directory}
+                </workingDirectory>
+                <arguments>
+                  <argument>${basedir}/../dev-support/bin/ozone-dist-tar-stitching
+                  </argument>
+                  <argument>${hdds.version}</argument>
+                  <argument>${project.build.directory}</argument>
+                </arguments>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
       </plugins>
     </build>
   </profile>
@ -14,4 +14,4 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
VERSION=${project.version}
|
HDDS_VERSION=${hdds.version}
|
@@ -20,7 +20,7 @@ services:
     image: apache/hadoop-runner
     hostname: namenode
     volumes:
-      - ../..//hadoop-${VERSION}:/opt/hadoop
+      - ../../ozone:/opt/hadoop
     ports:
       - 9870:9870
     environment:
@@ -31,7 +31,7 @@ services:
   datanode:
     image: apache/hadoop-runner
     volumes:
-      - ../..//hadoop-${VERSION}:/opt/hadoop
+      - ../../ozone:/opt/hadoop
     ports:
       - 9864
     command: ["/opt/hadoop/bin/ozone","datanode"]
@@ -40,7 +40,7 @@ services:
   ksm:
     image: apache/hadoop-runner
     volumes:
-      - ../..//hadoop-${VERSION}:/opt/hadoop
+      - ../../ozone:/opt/hadoop
     ports:
       - 9874:9874
     environment:
@@ -51,7 +51,7 @@ services:
   scm:
     image: apache/hadoop-runner
     volumes:
-      - ../..//hadoop-${VERSION}:/opt/hadoop
+      - ../../ozone:/opt/hadoop
     ports:
       - 9876:9876
     env_file:
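
Because the compose files are filtered into ${project.build.directory}/compose by the copy-docker-compose execution above, the relative ../../ozone volume should resolve to hadoop-dist/target/ozone. Assuming the compose file keeps an ozone/ subdirectory under target/compose, a local cluster could be started roughly like this:

```bash
# Sketch, assuming hadoop-dist/target/compose/ozone/docker-compose.yaml exists
# after a -Phdds -Pdist build; service names follow the compose file above.
cd hadoop-dist/target/compose/ozone
docker-compose up -d
docker-compose logs -f scm
docker-compose down
```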
@@ -112,7 +112,7 @@ static String getClientNamenodeAddress(
     }

     if (port > 0) {
       return currentNnAddress;
     } else {
       // the port is missing or 0. Figure out real bind address later.
       return null;
@@ -20,19 +20,29 @@ This project contains acceptance tests for ozone/hdds using docker-compose and [

 To run the acceptance tests, please activate the `ozone-acceptance-test` profile and do a full build.

-Typically you need a `mvn install -Phdds,ozone-acceptance-test,dist -DskipTests` for a build without unit tests but with acceptance test.
+```
+mvn clean install -Pdist -Phdds
+cd hadoop-ozone/acceptance-test
+mvn integration-test -Phdds,ozone-acceptance-test,dist -DskipTests
+```

 Notes:

 1. You need a hadoop build in hadoop-dist/target directory.
 2. The `ozone-acceptance-test` could be activated with profile even if the unit tests are disabled.
+3. This method does not require the robot framework on path as jpython is used.

 ## Development

-You can run manually the robot tests with `robot` cli. (See robotframework docs to install it.)
+You can also run manually the robot tests with `robot` cli.
+(See robotframework docs to install it: http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#installation-instructions)

-1. Go to the `src/test/robotframework`
-2. Execute `robot -v basedir:${PWD}/../../.. -v VERSION:3.2.0-SNAPSHOT .`
+In the dev-support directory we have two wrapper scripts to run robot framework with local robot cli
+instead of calling it from maven.

-You can also use select just one test with -t `"*testnamefragment*"`
+It's useful during the development of the robot files as any robotframework cli
+arguments could be used.
+
+1. `dev-support/bin/robot.sh` is the simple wrapper. The .robot file should be used as an argument.
+2. `dev-support/bin/robot-all.sh` will call the robot.sh with the main acceptance test directory,
+   which means all the acceptance tests will be executed.
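
To make the wrapper scripts concrete, a developer loop might look like the sketch below; the .robot file name is illustrative, and robot.sh simply forwards any robot CLI arguments after injecting the basedir variable:

```bash
# Hypothetical usage from hadoop-ozone/acceptance-test (file name illustrative).
dev-support/bin/robot.sh src/test/robotframework/acceptance/ozone-shell.robot
dev-support/bin/robot.sh -t "*volume*" src/test/robotframework/acceptance   # only matching tests
dev-support/bin/robot-all.sh                                                # every acceptance suite
```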
hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh (new executable file, 18 lines)
@@ -0,0 +1,18 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
$DIR/robot.sh $DIR/../../src/test/robotframework/acceptance
hadoop-ozone/acceptance-test/dev-support/bin/robot.sh (new executable file, 38 lines)
@@ -0,0 +1,38 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
#basedir is the directory of the whole hadoop project. Used to calculate the
#exact path to the hadoop-dist project
BASEDIR=${DIR}/../../../..

if [ ! "$(which robot)" ] ; then
    echo ""
    echo "robot is not on your PATH."
    echo ""
    echo "Please install it according to the documentation:"
    echo "    http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#installation-instructions"
    echo "    (TLDR; most of the time you need: 'pip install robotframework')"
    exit -1
fi

OZONEDISTDIR="$BASEDIR/hadoop-dist/target/ozone"
if [ ! -d "$OZONEDISTDIR" ]; then
    echo "Ozone can't be found in the $OZONEDISTDIR."
    echo "You may need a full build with -Phdds and -Pdist profiles"
    exit -1
fi
robot -v basedir:$BASEDIR $@
@@ -28,32 +28,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <description>Apache Hadoop Ozone Acceptance Tests</description>
   <name>Apache Hadoop Ozone Acceptance Tests</name>
   <packaging>pom</packaging>
-  <build>
-    <plugins>
-      <plugin>
-        <artifactId>maven-resources-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-docker-compose</id>
-            <goals>
-              <goal>copy-resources</goal>
-            </goals>
-            <phase>process-test-resources</phase>
-            <configuration>
-              <outputDirectory>${project.build.directory}/compose
-              </outputDirectory>
-              <resources>
-                <resource>
-                  <directory>src/test/compose</directory>
-                  <filtering>true</filtering>
-                </resource>
-              </resources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
   <profiles>
     <profile>
       <id>ozone-acceptance-test</id>
@@ -70,8 +44,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
             </goals>
             <configuration>
               <variables>
-                <variable>version:${project.version}</variable>
-                <variable>basedir:${project.basedir}</variable>
+                <variable>basedir:${project.basedir}/../..</variable>
               </variables>
               <skip>false</skip>
               <skipTests>false</skipTests>
@@ -14,4 +14,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-HADOOPDIR=../../hadoop-dist/target/hadoop-${project.version}
+OZONEDIR=../../../hadoop-dist/target/ozone
@@ -20,7 +20,7 @@ services:
     image: apache/hadoop-runner
     hostname: namenode
     volumes:
-      - ${HADOOPDIR}:/opt/hadoop
+      - ${OZONEDIR}:/opt/hadoop
     ports:
       - 9870
     environment:
@@ -31,7 +31,7 @@ services:
   datanode:
     image: apache/hadoop-runner
     volumes:
-      - ${HADOOPDIR}:/opt/hadoop
+      - ${OZONEDIR}:/opt/hadoop
     ports:
       - 9864
     command: ["/opt/hadoop/bin/ozone","datanode"]
@@ -41,7 +41,7 @@ services:
     image: apache/hadoop-runner
     hostname: ksm
     volumes:
-      - ${HADOOPDIR}:/opt/hadoop
+      - ${OZONEDIR}:/opt/hadoop
     ports:
       - 9874
     environment:
@@ -52,7 +52,7 @@ services:
   scm:
     image: apache/hadoop-runner
     volumes:
-      - ${HADOOPDIR}:/opt/hadoop
+      - ${OZONEDIR}:/opt/hadoop
     ports:
       - 9876
     env_file:
@@ -21,8 +21,7 @@ Suite Teardown      Teardown Ozone Cluster

 *** Variables ***
 ${COMMON_REST_HEADER}   -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE root"
-${version}
+${basedir}

 *** Test Cases ***

 Daemons are running without error
@@ -130,8 +129,8 @@ Execute on

 Run docker compose
     [arguments]                     ${command}
-    Set Environment Variable        HADOOPDIR    ${basedir}/../../hadoop-dist/target/hadoop-${version}
-    ${rc}    ${output} =            Run And Return Rc And Output    docker-compose -f ${basedir}/target/compose/docker-compose.yaml ${command}
+    Set Environment Variable        OZONEDIR     ${basedir}/hadoop-dist/target/ozone
+    ${rc}    ${output} =            Run And Return Rc And Output    docker-compose -f ${basedir}/hadoop-ozone/acceptance-test/src/test/compose/docker-compose.yaml ${command}
     Log                             ${output}
     Should Be Equal As Integers     ${rc}    0
     [return]                        ${rc}    ${output}
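
For readers less familiar with Robot Framework, the updated "Run docker compose" keyword is roughly equivalent to the following shell steps, with basedir pointing at the repository root:

```bash
# Rough shell equivalent of the updated keyword, run from the repository root.
export OZONEDIR="$PWD/hadoop-dist/target/ozone"
docker-compose -f hadoop-ozone/acceptance-test/src/test/compose/docker-compose.yaml up -d
```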