HADOOP-10854. unit tests for the shell scripts (aw)

This commit is contained in:
Allen Wittenauer 2015-07-31 14:34:48 -07:00
parent 666cafca8d
commit a890a31529
32 changed files with 1988 additions and 31 deletions

View File

@ -14,6 +14,8 @@ Requirements:
* Jansson C JSON parsing library ( if compiling libwebhdfs )
* Linux FUSE (Filesystem in Userspace) version 2.6 or above ( if compiling fuse_dfs )
* Internet connection for first build (to fetch all Maven and Hadoop dependencies)
* python (for releasedocs)
* bats (for shell code testing)
----------------------------------------------------------------------------------
The easiest way to get an environment with all the appropriate tools is by means
@ -106,7 +108,7 @@ Maven build goals:
* Clean : mvn clean [-Preleasedocs]
* Compile : mvn compile [-Pnative]
* Run tests : mvn test [-Pnative]
* Run tests : mvn test [-Pnative] [-Pshelltest]
* Create JAR : mvn package
* Run findbugs : mvn compile findbugs:findbugs
* Run checkstyle : mvn compile checkstyle:checkstyle

View File

@ -63,6 +63,14 @@ ENV FINDBUGS_HOME /opt/findbugs
RUN apt-get install -y cabal-install
RUN cabal update && cabal install shellcheck --global
#####
# bats
#####
RUN add-apt-repository ppa:duggan/bats --yes
RUN apt-get update -qq
RUN apt-get install -qq bats
# Fixing the Apache commons / Maven dependency problem under Ubuntu:
# See http://wiki.apache.org/commons/VfsProblems
RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang.jar .

View File

@ -51,6 +51,8 @@ Trunk (Unreleased)
HADOOP-7947. Validate XMLs if a relevant tool is available, when using
scripts (Kengo Seki via aw)
HADOOP-10854. unit tests for the shell scripts (aw)
IMPROVEMENTS
HADOOP-11203. Allow distcp to accept bandwidth in fraction MegaBytes

View File

@ -958,6 +958,39 @@
</build>
</profile>
<!-- profile to test shell code -->
<profile>
<id>shelltest</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>common-test-bats-driver</id>
<phase>process-test-classes</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<exec dir="src/test/scripts"
executable="bash"
failonerror="true">
<arg value="./run-bats.sh" />
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@ -358,6 +358,7 @@ function hadoop_import_shellprofiles
if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
hadoop_debug "shellprofiles: ${files1[*]}"
else
hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
fi
@ -368,7 +369,8 @@ function hadoop_import_shellprofiles
for i in "${files1[@]}" "${files2[@]}"
do
if [[ -n "${i}" ]]; then
if [[ -n "${i}"
&& -f "${i}" ]]; then
hadoop_debug "Profiles: importing ${i}"
. "${i}"
fi
@ -490,6 +492,26 @@ function hadoop_basic_init
export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
fi
if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
exit 1
fi
if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
exit 1
fi
if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
exit 1
fi
if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
exit 1
fi
HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
@ -670,7 +692,7 @@ function hadoop_common_slave_mode_execute
# to prevent loops
# Also remove --hostnames and --hosts along with arg values
local argsSize=${#argv[@]};
for (( i = 0; i < $argsSize; i++ ))
for (( i = 0; i < argsSize; i++ ))
do
if [[ "${argv[$i]}" =~ ^--slaves$ ]]; then
unset argv[$i]
@ -681,6 +703,10 @@ function hadoop_common_slave_mode_execute
unset argv[$i];
fi
done
if [[ ${QATESTMODE} = true ]]; then
echo "${argv[@]}"
return
fi
hadoop_connect_to_hosts -- "${argv[@]}"
}
@ -727,8 +753,12 @@ function hadoop_add_param
# delimited
#
if [[ ! ${!1} =~ $2 ]] ; then
# shellcheck disable=SC2086
eval $1="'${!1} $3'"
#shellcheck disable=SC2140
eval "$1"="'${!1} $3'"
if [[ ${!1:0:1} = ' ' ]]; then
#shellcheck disable=SC2140
eval "$1"="'${!1# }'"
fi
hadoop_debug "$1 accepted $3"
else
hadoop_debug "$1 declined $3"
@ -766,7 +796,8 @@ function hadoop_add_classpath
# for wildcard at end, we can
# at least check the dir exists
if [[ $1 =~ ^.*\*$ ]]; then
local mp=$(dirname "$1")
local mp
mp=$(dirname "$1")
if [[ ! -d "${mp}" ]]; then
hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
return 1
@ -825,7 +856,7 @@ function hadoop_add_colonpath
hadoop_debug "Prepend colonpath($1): $2"
else
# shellcheck disable=SC2086
eval $1+="'$2'"
eval $1+=":'$2'"
hadoop_debug "Append colonpath($1): $2"
fi
return 0
@ -864,11 +895,14 @@ function hadoop_add_javalibpath
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_ldlibpath
{
# Append (or prepend, if $2 = "before") directory $1 to LD_LIBRARY_PATH
# and export the result.
local status
# specialized function for a common use case
hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
# capture the colonpath exit code so dupes/nonexistent dirs still
# propagate failure to our caller
status=$?
# note that we export this
export LD_LIBRARY_PATH
return ${status}
}
## @description Add the common/core Hadoop components to the
@ -876,21 +910,29 @@ function hadoop_add_ldlibpath
## @audience private
## @stability evolving
## @replaceable yes
## @returns 1 on failure, may exit
## @returns 0 on success
function hadoop_add_common_to_classpath
{
# Add the hadoop-common jars and config dirs to CLASSPATH.
# Requires HADOOP_COMMON_HOME, HADOOP_COMMON_DIR, and
# HADOOP_COMMON_LIB_JARS_DIR to be set; exits 1 otherwise.
#
# get all of the common jars+config in the path
#
if [[ -z "${HADOOP_COMMON_HOME}"
|| -z "${HADOOP_COMMON_DIR}"
|| -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
# dump the individual vars so the failure is diagnosable with debug on
hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
exit 1
fi
# developers
if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
fi
# the config/webapps dir is only added when webapps actually exists
if [[ -d "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}/webapps" ]]; then
hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"
fi
# literal '/*' wildcards are passed through for the JVM to expand
hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}
@ -909,27 +951,27 @@ function hadoop_add_to_classpath_userpath
# set env-var HADOOP_USER_CLASSPATH_FIRST
# we'll also dedupe it, because we're cool like that.
#
local c
local array
local i
local j
let c=0
declare -a array
declare -i c=0
declare -i j
declare -i i
declare idx
if [[ -n "${HADOOP_CLASSPATH}" ]]; then
# I wonder if Java runs on VMS.
for i in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
array[$c]=$i
let c+=1
for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
array[${c}]=${idx}
((c=c+1))
done
let j=c-1
((j=c-1))
if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
for ((i=j; i>=0; i--)); do
for ((i=0; i<=j; i++)); do
hadoop_add_classpath "${array[$i]}" after
done
else
for ((i=0; i<=j; i++)); do
for ((i=j; i>=0; i--)); do
hadoop_add_classpath "${array[$i]}" before
done
fi
@ -951,18 +993,32 @@ function hadoop_os_tricks
Darwin)
if [[ -z "${JAVA_HOME}" ]]; then
if [[ -x /usr/libexec/java_home ]]; then
export JAVA_HOME="$(/usr/libexec/java_home)"
JAVA_HOME="$(/usr/libexec/java_home)"
export JAVA_HOME
else
export JAVA_HOME=/Library/Java/Home
JAVA_HOME=/Library/Java/Home
export JAVA_HOME
fi
fi
;;
Linux)
bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
# Newer versions of glibc use an arena memory allocator that
# causes virtual memory usage to explode. This interacts badly
# with the many threads that we use in Hadoop. Tune the variable
# down to prevent vmem explosion.
export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
# we put this in QA test mode off so that non-Linux can test
if [[ "${QATESTMODE}" = true ]]; then
return
fi
# NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
# undocumented in hadoop-env.sh because we don't want users to
# shoot themselves in the foot while devs make IPv6 work.
bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
if [[ -n "${bindv6only}" ]] &&
[[ "${bindv6only}" -eq "1" ]] &&
[[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
@ -971,11 +1027,6 @@ function hadoop_os_tricks
hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
exit 1
fi
# Newer versions of glibc use an arena memory allocator that
# causes virtual # memory usage to explode. This interacts badly
# with the many threads that we use in Hadoop. Tune the variable
# down to prevent vmem explosion.
export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
;;
CYGWIN*)
# Flag that we're running on Cygwin to trigger path translation later.
@ -1019,7 +1070,7 @@ function hadoop_finalize_libpaths
if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
hadoop_add_param HADOOP_OPTS java.library.path \
"-Djava.library.path=${JAVA_LIBRARY_PATH}"
"-Djava.library.path=${JAVA_LIBRARY_PATH}"
export LD_LIBRARY_PATH
fi
}
@ -1168,6 +1219,7 @@ function hadoop_exit_with_usage
if [[ -z $exitcode ]]; then
exitcode=1
fi
# shellcheck disable=SC2034
if declare -F hadoop_usage >/dev/null ; then
hadoop_usage
elif [[ -x /usr/bin/cowsay ]]; then
@ -1464,6 +1516,7 @@ function hadoop_start_secure_daemon
hadoop_rotate_log "${daemonoutfile}"
hadoop_rotate_log "${daemonerrfile}"
# shellcheck disable=SC2153
jsvc="${JSVC_HOME}/jsvc"
if [[ ! -f "${jsvc}" ]]; then
hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
@ -1490,6 +1543,7 @@ function hadoop_start_secure_daemon
hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
fi
# shellcheck disable=SC2086
exec "${jsvc}" \
"-Dproc_${daemonname}" \
-outfile "${daemonoutfile}" \

View File

@ -0,0 +1,56 @@
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
setup() {
# bats per-test fixture: create a scratch dir, point the environment at
# the in-tree hadoop shell scripts, and source hadoop-functions.sh.
TMP=../../../target/test-dir/bats.$$.${RANDOM}
mkdir -p ${TMP}
# canonicalize TMP to an absolute physical path
TMP=$(cd -P -- "${TMP}" >/dev/null && pwd -P)
export TMP
TESTBINDIR=$(cd -P -- "$(pwd)" >/dev/null && pwd -P)
HADOOP_LIBEXEC_DIR=${TESTBINDIR}/../../main/bin
HADOOP_LIBEXEC_DIR=$(cd -P -- "${HADOOP_LIBEXEC_DIR}" >/dev/null && pwd -P)
# shellcheck disable=SC2034
HADOOP_SHELL_SCRIPT_DEBUG=true
# start from a clean slate so the caller's environment cannot leak in
unset HADOOP_CONF_DIR
unset HADOOP_HOME
unset HADOOP_PREFIX
echo "bindir: ${TESTBINDIR}" 2>&1
mkdir -p "${TMP}"
# shellcheck disable=SC2034
QATESTMODE=true
# pull in the functions under test; relative path assumes bats is run
# from src/test/scripts (see run-bats.sh)
. ../../main/bin/hadoop-functions.sh
pushd "${TMP}" >/dev/null
}
teardown() {
# undo the pushd from setup and remove the scratch dir
popd >/dev/null
rm -rf "${TMP}"
}
strstr() {
  # Echo "true" when haystack ($1) contains needle ($2, glob-interpreted),
  # "false" otherwise.  Uses prefix-strip expansion: removing the shortest
  # "*needle" prefix only changes the string when the needle is present.
  local haystack=$1
  local needle=$2
  [ "${haystack#*$needle}" != "${haystack}" ] && echo true || echo false
}

View File

@ -0,0 +1,100 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
# bats tests for hadoop_add_classpath: exit codes, dupe suppression,
# and before/after ordering of CLASSPATH entries.
@test "hadoop_add_classpath (simple not exist)" {
run hadoop_add_classpath testvar
[ "${status}" -eq 1 ]
}
@test "hadoop_add_classpath (simple wildcard not exist)" {
run hadoop_add_classpath testvar/*
[ "${status}" -eq 1 ]
}
@test "hadoop_add_classpath (simple exist)" {
run hadoop_add_classpath "${TMP}"
[ "${status}" -eq 0 ]
}
@test "hadoop_add_classpath (simple wildcard exist)" {
run hadoop_add_classpath "${TMP}/*"
[ "${status}" -eq 0 ]
}
@test "hadoop_add_classpath (simple dupecheck)" {
hadoop_add_classpath "${TMP}/*"
hadoop_add_classpath "${TMP}/*"
echo ">${CLASSPATH}<"
[ "${CLASSPATH}" = "${TMP}/*" ]
}
@test "hadoop_add_classpath (default order)" {
hadoop_add_classpath "${TMP}/*"
hadoop_add_classpath "/tmp"
echo ">${CLASSPATH}<"
[ "${CLASSPATH}" = "${TMP}/*:/tmp" ]
}
@test "hadoop_add_classpath (after order)" {
hadoop_add_classpath "${TMP}/*"
hadoop_add_classpath "/tmp" after
echo ">${CLASSPATH}<"
[ "${CLASSPATH}" = "${TMP}/*:/tmp" ]
}
@test "hadoop_add_classpath (before order)" {
hadoop_add_classpath "${TMP}/*"
hadoop_add_classpath "/tmp" before
echo ">${CLASSPATH}<"
[ "${CLASSPATH}" = "/tmp:${TMP}/*" ]
}
@test "hadoop_add_classpath (simple dupecheck 2)" {
hadoop_add_classpath "${TMP}/*"
hadoop_add_classpath "/tmp"
hadoop_add_classpath "${TMP}/*"
echo ">${CLASSPATH}<"
[ "${CLASSPATH}" = "${TMP}/*:/tmp" ]
}
@test "hadoop_add_classpath (dupecheck 3)" {
hadoop_add_classpath "${TMP}/*"
hadoop_add_classpath "/tmp" before
hadoop_add_classpath "${TMP}/*"
hadoop_add_classpath "/tmp" after
echo ">${CLASSPATH}<"
[ "${CLASSPATH}" = "/tmp:${TMP}/*" ]
}
@test "hadoop_add_classpath (complex ordering)" {
local j
local style="after"
# alternate before/after on each insertion:
# 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
for j in {1..5}; do
mkdir ${TMP}/${j}
hadoop_add_classpath "${TMP}/${j}" "${style}"
if [ "${style}" = "after" ]; then
style=before
else
style=after
fi
done
echo ">${CLASSPATH}<"
[ "${CLASSPATH}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
}

View File

@ -0,0 +1,96 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
# bats tests for hadoop_add_colonpath: generic colon-delimited list
# management on an arbitrary variable ("testvar").
@test "hadoop_add_colonpath (simple not exist)" {
run hadoop_add_colonpath testvar
[ "${status}" -eq 1 ]
}
@test "hadoop_add_colonpath (simple exist)" {
run hadoop_add_colonpath testvar "${TMP}"
[ "${status}" -eq 0 ]
}
@test "hadoop_add_colonpath (simple dupecheck)" {
# set +e: the second (duplicate) call returns non-zero by design
set +e
hadoop_add_colonpath testvar "${TMP}"
hadoop_add_colonpath testvar "${TMP}"
set -e
echo ">${testvar}<"
[ "${testvar}" = "${TMP}" ]
}
@test "hadoop_add_colonpath (default order)" {
hadoop_add_colonpath testvar "${TMP}"
hadoop_add_colonpath testvar "/tmp"
echo ">${testvar}<"
[ "${testvar}" = "${TMP}:/tmp" ]
}
@test "hadoop_add_colonpath (after order)" {
hadoop_add_colonpath testvar "${TMP}"
hadoop_add_colonpath testvar "/tmp" after
echo ">${testvar}<"
[ "${testvar}" = "${TMP}:/tmp" ]
}
@test "hadoop_add_colonpath (before order)" {
hadoop_add_colonpath testvar "${TMP}"
hadoop_add_colonpath testvar "/tmp" before
echo ">${testvar}<"
[ "${testvar}" = "/tmp:${TMP}" ]
}
@test "hadoop_add_colonpath (simple dupecheck 2)" {
set +e
hadoop_add_colonpath testvar "${TMP}"
hadoop_add_colonpath testvar "/tmp"
hadoop_add_colonpath testvar "${TMP}"
set -e
echo ">${testvar}<"
[ "${testvar}" = "${TMP}:/tmp" ]
}
@test "hadoop_add_colonpath (dupecheck 3)" {
set +e
hadoop_add_colonpath testvar "${TMP}"
hadoop_add_colonpath testvar "/tmp" before
hadoop_add_colonpath testvar "${TMP}"
hadoop_add_colonpath testvar "/tmp" after
set -e
echo ">${testvar}<"
[ "${testvar}" = "/tmp:${TMP}" ]
}
@test "hadoop_add_colonpath (complex ordering)" {
local j
local style="after"
# 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
for j in {1..5}; do
mkdir ${TMP}/${j}
hadoop_add_colonpath testvar "${TMP}/${j}" "${style}"
if [ "${style}" = "after" ]; then
style=before
else
style=after
fi
done
echo ">${testvar}<"
[ "${testvar}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
}

View File

@ -0,0 +1,71 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
# bats tests for hadoop_add_common_to_classpath using a fake
# HADOOP_COMMON_HOME layout built under ${TMP}.
freetheclasses () {
# clear every env var that could influence the function under test
local j
for j in HADOOP_CLASSPATH \
HADOOP_ENABLE_BUILD_PATHS \
CLASSPATH HADOOP_COMMON_DIR \
HADOOP_COMMON_HOME \
HADOOP_COMMON_LIB_JARS_DIR \
HADOOP_ENABLE_BUILD_PATHS ; do
# NOTE(review): 'unset ${!j}' unsets the variable *named by the value*
# of $j's variable; 'unset ${j}' looks intended — confirm
unset ${!j}
done
}
createdirs () {
# build a minimal fake common-home tree and point the env vars at it
local j
for j in hadoop-common/target/classes \
commondir/webapps commonlibjars ; do
mkdir -p "${TMP}/${j}"
touch "${TMP}/${j}/fake.jar"
done
HADOOP_COMMON_HOME=${TMP}
HADOOP_COMMON_DIR=commondir
HADOOP_COMMON_LIB_JARS_DIR=commonlibjars
}
@test "hadoop_add_common_to_classpath (negative)" {
freetheclasses
createdirs
unset HADOOP_COMMON_HOME
run hadoop_add_common_to_classpath
[ "${status}" -eq 1 ]
}
@test "hadoop_add_common_to_classpath (positive)" {
freetheclasses
createdirs
set +e
hadoop_add_common_to_classpath
set -e
echo ">${CLASSPATH}<"
[ "${CLASSPATH}" = "${TMP}/commonlibjars/*:${TMP}/commondir/*" ]
}
@test "hadoop_add_common_to_classpath (build paths)" {
freetheclasses
createdirs
HADOOP_ENABLE_BUILD_PATHS=true
set +e
hadoop_add_common_to_classpath
set -e
echo ">${CLASSPATH}<"
[ "${CLASSPATH}" = "${TMP}/hadoop-common/target/classes:${TMP}/commonlibjars/*:${TMP}/commondir/*" ]
}

View File

@ -0,0 +1,98 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
# bats tests for hadoop_add_javalibpath: existence checks, dupe
# suppression, and ordering of JAVA_LIBRARY_PATH entries.
@test "hadoop_add_javalibpath (simple not exist)" {
run hadoop_add_javalibpath "${TMP}/foo"
[ "${status}" -eq 1 ]
}
@test "hadoop_add_javalibpath (simple exist)" {
run hadoop_add_javalibpath "${TMP}"
[ "${status}" -eq 0 ]
}
@test "hadoop_add_javalibpath (simple dupecheck)" {
# set +e: duplicate adds return non-zero by design
set +e
hadoop_add_javalibpath "${TMP}"
hadoop_add_javalibpath "${TMP}"
set -e
echo ">${JAVA_LIBRARY_PATH}<"
[ "${JAVA_LIBRARY_PATH}" = "${TMP}" ]
}
@test "hadoop_add_javalibpath (default order)" {
hadoop_add_javalibpath "${TMP}"
hadoop_add_javalibpath "/tmp"
echo ">${JAVA_LIBRARY_PATH}<"
[ "${JAVA_LIBRARY_PATH}" = "${TMP}:/tmp" ]
}
@test "hadoop_add_javalibpath (after order)" {
hadoop_add_javalibpath "${TMP}"
hadoop_add_javalibpath "/tmp" after
echo ">${JAVA_LIBRARY_PATH}<"
[ "${JAVA_LIBRARY_PATH}" = "${TMP}:/tmp" ]
}
@test "hadoop_add_javalibpath (before order)" {
hadoop_add_javalibpath "${TMP}"
hadoop_add_javalibpath "/tmp" before
echo ">${JAVA_LIBRARY_PATH}<"
[ "${JAVA_LIBRARY_PATH}" = "/tmp:${TMP}" ]
}
@test "hadoop_add_javalibpath (simple dupecheck 2)" {
set +e
hadoop_add_javalibpath "${TMP}"
hadoop_add_javalibpath "/tmp"
hadoop_add_javalibpath "${TMP}"
set -e
echo ">${JAVA_LIBRARY_PATH}<"
[ "${JAVA_LIBRARY_PATH}" = "${TMP}:/tmp" ]
}
@test "hadoop_add_javalibpath (dupecheck 3)" {
set +e
hadoop_add_javalibpath "${TMP}"
hadoop_add_javalibpath "/tmp" before
hadoop_add_javalibpath "${TMP}"
hadoop_add_javalibpath "/tmp" after
set -e
echo ">${JAVA_LIBRARY_PATH}<"
[ "${JAVA_LIBRARY_PATH}" = "/tmp:${TMP}" ]
}
@test "hadoop_add_javalibpath (complex ordering)" {
local j
local style="after"
# 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
for j in {1..5}; do
mkdir ${TMP}/${j}
hadoop_add_javalibpath "${TMP}/${j}" "${style}"
if [ "${style}" = "after" ]; then
style=before
else
style=after
fi
done
echo ">${JAVA_LIBRARY_PATH}<"
[ "${JAVA_LIBRARY_PATH}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
}

View File

@ -0,0 +1,97 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
# bats tests for hadoop_add_ldlibpath: same contract as colonpath but
# operating on (and exporting) LD_LIBRARY_PATH.
@test "hadoop_add_ldlibpath (simple not exist)" {
run hadoop_add_ldlibpath ${TMP}/foo
[ "${status}" -eq 1 ]
}
@test "hadoop_add_ldlibpath (simple exist)" {
run hadoop_add_ldlibpath "${TMP}"
[ "${status}" -eq 0 ]
}
@test "hadoop_add_ldlibpath (simple dupecheck)" {
# set +e: duplicate adds return non-zero by design
set +e
hadoop_add_ldlibpath "${TMP}"
hadoop_add_ldlibpath "${TMP}"
set -e
echo ">${LD_LIBRARY_PATH}<"
[ "${LD_LIBRARY_PATH}" = "${TMP}" ]
}
@test "hadoop_add_ldlibpath (default order)" {
hadoop_add_ldlibpath "${TMP}"
hadoop_add_ldlibpath "/tmp"
echo ">${LD_LIBRARY_PATH}<"
[ "${LD_LIBRARY_PATH}" = "${TMP}:/tmp" ]
}
@test "hadoop_add_ldlibpath (after order)" {
hadoop_add_ldlibpath "${TMP}"
hadoop_add_ldlibpath "/tmp" after
echo ">${LD_LIBRARY_PATH}<"
[ "${LD_LIBRARY_PATH}" = "${TMP}:/tmp" ]
}
@test "hadoop_add_ldlibpath (before order)" {
hadoop_add_ldlibpath "${TMP}"
hadoop_add_ldlibpath "/tmp" before
echo ">${LD_LIBRARY_PATH}<"
[ "${LD_LIBRARY_PATH}" = "/tmp:${TMP}" ]
}
@test "hadoop_add_ldlibpath (simple dupecheck 2)" {
set +e
hadoop_add_ldlibpath "${TMP}"
hadoop_add_ldlibpath "/tmp"
hadoop_add_ldlibpath "${TMP}"
set -e
echo ">${LD_LIBRARY_PATH}<"
[ "${LD_LIBRARY_PATH}" = "${TMP}:/tmp" ]
}
@test "hadoop_add_ldlibpath (dupecheck 3)" {
set +e
hadoop_add_ldlibpath "${TMP}"
hadoop_add_ldlibpath "/tmp" before
hadoop_add_ldlibpath "${TMP}"
hadoop_add_ldlibpath "/tmp" after
set -e
echo ">${LD_LIBRARY_PATH}<"
[ "${LD_LIBRARY_PATH}" = "/tmp:${TMP}" ]
}
@test "hadoop_add_ldlibpath (complex ordering)" {
local j
local style="after"
# 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
for j in {1..5}; do
mkdir ${TMP}/${j}
hadoop_add_ldlibpath "${TMP}/${j}" "${style}"
if [ "${style}" = "after" ]; then
style=before
else
style=after
fi
done
echo ">${LD_LIBRARY_PATH}<"
[ "${LD_LIBRARY_PATH}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
}

View File

@ -0,0 +1,49 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
# bats tests for hadoop_add_param: space-delimited list management with
# duplicate suppression (second arg is the dupe-check key).
@test "hadoop_add_param (positive 1)" {
hadoop_add_param testvar foo foo
echo ">${testvar}<"
[ "${testvar}" = "foo" ]
}
@test "hadoop_add_param (negative)" {
# duplicate key must be declined
hadoop_add_param testvar foo foo
hadoop_add_param testvar foo foo
echo ">${testvar}<"
[ "${testvar}" = "foo" ]
}
@test "hadoop_add_param (positive 2)" {
hadoop_add_param testvar foo foo
hadoop_add_param testvar foo foo
hadoop_add_param testvar bar bar
echo ">${testvar}<"
[ "${testvar}" = "foo bar" ]
}
@test "hadoop_add_param (positive 3)" {
hadoop_add_param testvar foo foo
hadoop_add_param testvar foo foo
hadoop_add_param testvar bar bar
hadoop_add_param testvar bar bar
hadoop_add_param testvar baz baz
hadoop_add_param testvar baz baz
echo ">${testvar}<"
[ "${testvar}" = "foo bar baz" ]
}

View File

@ -0,0 +1,98 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
# bats tests for hadoop_add_to_classpath_userpath: merging
# HADOOP_CLASSPATH into CLASSPATH, honoring
# HADOOP_USER_CLASSPATH_FIRST and HADOOP_USE_CLIENT_CLASSLOADER.
freetheclasses () {
local j
for j in HADOOP_CLASSPATH \
HADOOP_USE_CLIENT_CLASSLOADER \
HADOOP_USER_CLASSPATH_FIRST \
CLASSPATH; do
# NOTE(review): 'unset ${!j}' unsets the variable *named by the value*
# of $j's variable; 'unset ${j}' looks intended — confirm
unset ${!j}
done
}
createdirs () {
# directories used as classpath entries by the tests below
local j
for j in new old foo bar baz; do
mkdir -p "${TMP}/${j}"
done
}
@test "hadoop_add_to_classpath_userpath (nothing)" {
freetheclasses
hadoop_add_to_classpath_userpath
[ -z "${CLASSPATH}" ]
}
@test "hadoop_add_to_classpath_userpath (none)" {
freetheclasses
CLASSPATH=test
hadoop_add_to_classpath_userpath
[ "${CLASSPATH}" = "test" ]
}
@test "hadoop_add_to_classpath_userpath (only)" {
freetheclasses
createdirs
HADOOP_CLASSPATH="${TMP}/new"
hadoop_add_to_classpath_userpath
[ "${CLASSPATH}" = "${TMP}/new" ]
}
@test "hadoop_add_to_classpath_userpath (classloader)" {
# client classloader mode must leave CLASSPATH untouched
freetheclasses
createdirs
HADOOP_CLASSPATH="${TMP}/new"
HADOOP_USE_CLIENT_CLASSLOADER="true"
hadoop_add_to_classpath_userpath
[ -z "${CLASSPATH}" ]
}
@test "hadoop_add_to_classpath_userpath (1+1 dupe)" {
freetheclasses
createdirs
CLASSPATH=${TMP}/foo
HADOOP_CLASSPATH=${TMP}/foo
HADOOP_USER_CLASSPATH_FIRST=""
hadoop_add_to_classpath_userpath
echo ">${CLASSPATH}<"
[ ${CLASSPATH} = "${TMP}/foo" ]
}
@test "hadoop_add_to_classpath_userpath (3+2 after)" {
freetheclasses
createdirs
CLASSPATH=${TMP}/foo:${TMP}/bar:${TMP}/baz
HADOOP_CLASSPATH=${TMP}/new:${TMP}/old
HADOOP_USER_CLASSPATH_FIRST=""
hadoop_add_to_classpath_userpath
echo ">${CLASSPATH}<"
[ ${CLASSPATH} = "${TMP}/foo:${TMP}/bar:${TMP}/baz:${TMP}/new:${TMP}/old" ]
}
@test "hadoop_add_to_classpath_userpath (3+2 before)" {
freetheclasses
createdirs
CLASSPATH=${TMP}/foo:${TMP}/bar:${TMP}/baz
HADOOP_CLASSPATH=${TMP}/new:${TMP}/old
HADOOP_USER_CLASSPATH_FIRST="true"
hadoop_add_to_classpath_userpath
echo ">${CLASSPATH}<"
[ ${CLASSPATH} = "${TMP}/new:${TMP}/old:${TMP}/foo:${TMP}/bar:${TMP}/baz" ]
}

View File

@ -0,0 +1,94 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
# bats tests for hadoop_basic_init: directory-var validation plus
# defaulting/override behavior of the non-dir settings.
basicinitsetup () {
local j
# vars hadoop_basic_init is expected to give default values to
testvars="HADOOP_IDENT_STRING \
HADOOP_LOG_DIR \
HADOOP_LOGFILE \
HADOOP_LOGLEVEL \
HADOOP_NICENESS \
HADOOP_STOP_TIMEOUT \
HADOOP_PID_DIR \
HADOOP_ROOT_LOGGER \
HADOOP_DAEMON_ROOT_LOGGER \
HADOOP_SECURITY_LOGGER \
HADOOP_SSH_OPTS \
HADOOP_SECURE_LOG_DIR \
HADOOP_SECURE_PID_DIR \
HADOOP_SSH_PARALLEL"
# vars that must point at existing directories
dirvars="HADOOP_COMMON_HOME \
HADOOP_MAPRED_HOME \
HADOOP_HDFS_HOME \
HADOOP_YARN_HOME"
for j in ${testvars}; do
# NOTE(review): 'unset ${!j}' unsets the variable *named by the value*
# of $j's variable; 'unset ${j}' looks intended — confirm
unset ${!j}
done
HADOOP_PREFIX=${TMP}
}
check_var_values () {
# every var in testvars must be non-empty after hadoop_basic_init
for j in ${testvars}; do
echo "Verifying ${j} has a value"
[ -n "${!j}" ]
done
}
@test "hadoop_basic_init (bad dir errors)" {
local j
local i
# we need to do these in the same order for
# the unit test, so that the tests are easier
# to write/test
basicinitsetup
for j in ${dirvars}; do
echo "testing ${j}"
i=${TMP}/${j}
mkdir -p "${i}"
#shellcheck disable=SC2086
eval ${j}=${i}
hadoop_basic_init
echo "Verifying $j has >${i}< >${!j}<"
[ ${!j} = ${i} ]
done
}
@test "hadoop_basic_init (no non-dir overrides)" {
basicinitsetup
hadoop_basic_init
check_var_values
}
@test "hadoop_basic_init (test non-dir overrides)" {
local j
for j in ${testvars}; do
basicinitsetup
echo testing ${j}
eval ${j}=foo
hadoop_basic_init
check_var_values
echo "Verifying $j has foo >${!j}<"
# NOTE(review): this compares the *name* ${j} to "foo"; the echo above
# suggests '[ ${!j} = foo ]' (the value) was intended — confirm
[ ${j} = foo ]
done
}

View File

@ -0,0 +1,51 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
# bats tests for hadoop_bootstrap.
# NOTE(review): test names say "hadoop_deprecate_envvar" but both tests
# exercise hadoop_bootstrap — likely copy/paste names; confirm.
@test "hadoop_deprecate_envvar (no libexec)" {
unset HADOOP_LIBEXEC_DIR
run hadoop_bootstrap
[ "${status}" -eq 1 ]
}
@test "hadoop_deprecate_envvar (libexec)" {
unset HADOOP_PREFIX
unset HADOOP_COMMON_DIR
unset HADOOP_COMMON_LIB_JARS_DIR
unset HDFS_DIR
unset HDFS_LIB_JARS_DIR
unset YARN_DIR
unset YARN_LIB_JARS_DIR
unset MAPRED_DIR
unset MAPRED_LIB_JARS_DIR
unset TOOL_PATH
unset HADOOP_OS_TYPE
hadoop_bootstrap
# all of these should be set
# NOTE(review): unquoted '[ -n ${VAR} ]' collapses to '[ -n ]' (true)
# when the var is empty, so these checks are vacuous; quote to make
# them meaningful — confirm intent
[ -n ${HADOOP_PREFIX} ]
[ -n ${HADOOP_COMMON_DIR} ]
[ -n ${HADOOP_COMMON_LIB_JARS_DIR} ]
[ -n ${HDFS_DIR} ]
[ -n ${HDFS_LIB_JARS_DIR} ]
[ -n ${YARN_DIR} ]
[ -n ${YARN_LIB_JARS_DIR} ]
[ -n ${MAPRED_DIR} ]
[ -n ${MAPRED_LIB_JARS_DIR} ]
[ -n ${TOOL_PATH} ]
[ -n ${HADOOP_OS_TYPE} ]
}

View File

@ -0,0 +1,92 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# Build a fake HADOOP_PREFIX containing both the legacy (conf) and modern
# (etc/hadoop) configuration directories, each with its own hadoop-env.sh
# whose 'unittest' value identifies which directory was picked up.
create_fake_dirs () {
  local j
  HADOOP_PREFIX=${TMP}
  for j in conf etc/hadoop; do
    mkdir -p "${HADOOP_PREFIX}/${j}"
    echo "unittest=${j}" > "${HADOOP_PREFIX}/${j}/hadoop-env.sh"
  done
}

@test "hadoop_find_confdir (default)" {
  create_fake_dirs
  hadoop_find_confdir
  [ -n "${HADOOP_CONF_DIR}" ]
}

# Backward compatibility: when both directories exist, the legacy
# ${HADOOP_PREFIX}/conf wins.  (RHS quoted: TMP may contain spaces.)
@test "hadoop_find_confdir (bw compat: conf)" {
  create_fake_dirs
  hadoop_find_confdir
  echo ">${HADOOP_CONF_DIR}< >${HADOOP_PREFIX}/conf<"
  [ "${HADOOP_CONF_DIR}" = "${HADOOP_PREFIX}/conf" ]
}

@test "hadoop_find_confdir (etc/hadoop)" {
  create_fake_dirs
  rm -rf "${HADOOP_PREFIX}/conf"
  hadoop_find_confdir
  [ "${HADOOP_CONF_DIR}" = "${HADOOP_PREFIX}/etc/hadoop" ]
}

# hadoop_verify_confdir is expected to warn (produce output) when
# log4j.properties is missing from the conf dir.
@test "hadoop_verify_confdir (negative) " {
  create_fake_dirs
  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  run hadoop_verify_confdir
  [ -n "${output}" ]
}

@test "hadoop_verify_confdir (positive) " {
  create_fake_dirs
  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  touch "${HADOOP_CONF_DIR}/log4j.properties"
  run hadoop_verify_confdir
  [ -z "${output}" ]
}

@test "hadoop_exec_hadoopenv (positive) " {
  create_fake_dirs
  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  hadoop_exec_hadoopenv
  [ -n "${HADOOP_ENV_PROCESSED}" ]
  [ "${unittest}" = conf ]
}

# Once HADOOP_ENV_PROCESSED is set, hadoop-env.sh must not be re-read.
@test "hadoop_exec_hadoopenv (negative) " {
  create_fake_dirs
  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  HADOOP_ENV_PROCESSED=true
  hadoop_exec_hadoopenv
  [ -z "${unittest}" ]
}

@test "hadoop_exec_userfuncs" {
  create_fake_dirs
  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  echo "unittest=userfunc" > "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  hadoop_exec_userfuncs
  [ "${unittest}" = "userfunc" ]
}

@test "hadoop_exec_hadooprc" {
  HOME=${TMP}
  echo "unittest=hadooprc" > "${TMP}/.hadooprc"
  hadoop_exec_hadooprc
  # quoted: unset ${unittest} would otherwise make '[' see one operand
  [ "${unittest}" = "hadooprc" ]
}

View File

@ -0,0 +1,32 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# hadoop_deprecate_envvar OLD NEW copies OLD's value into NEW when OLD is
# set/non-empty; otherwise NEW keeps its own value.
@test "hadoop_deprecate_envvar (replace)" {
  OLD=value1
  NEW=value2
  hadoop_deprecate_envvar OLD NEW
  [ "${NEW}" = "${OLD}" ]
}

# Empty OLD must leave NEW untouched.
@test "hadoop_deprecate_envvar (no replace)" {
  OLD=
  NEW=value2
  hadoop_deprecate_envvar OLD NEW
  [ "${NEW}" = value2 ]
}

View File

@ -0,0 +1,206 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# hadoop_finalize fans out to a fixed set of helper routines.  Stub them
# all out to no-ops here; each test then re-overrides the single routine
# whose invocation it wants to observe via the global 'testvar'.  (The
# original repeated these six stubs verbatim in all eleven tests.)
finalizer_mocks () {
  HADOOP_IS_CYGWIN=false
  hadoop_shellprofiles_finalize () { true; }
  hadoop_finalize_classpath () { true; }
  hadoop_finalize_libpaths () { true; }
  hadoop_finalize_hadoop_heap () { true; }
  hadoop_finalize_hadoop_opts () { true; }
  hadoop_translate_cygwin_path () { true; }
}

@test "hadoop_finalize (shellprofiles)" {
  finalizer_mocks
  hadoop_shellprofiles_finalize () { testvar=shell; }
  hadoop_finalize
  [ "${testvar}" = "shell" ];
}

@test "hadoop_finalize (classpath)" {
  finalizer_mocks
  hadoop_finalize_classpath () { testvar=class; }
  hadoop_finalize
  [ "${testvar}" = "class" ];
}

@test "hadoop_finalize (libpaths)" {
  finalizer_mocks
  hadoop_finalize_libpaths () { testvar=libpaths; }
  hadoop_finalize
  [ "${testvar}" = "libpaths" ];
}

@test "hadoop_finalize (heap)" {
  finalizer_mocks
  hadoop_finalize_hadoop_heap () { testvar=heap; }
  hadoop_finalize
  [ "${testvar}" = "heap" ];
}

@test "hadoop_finalize (opts)" {
  finalizer_mocks
  hadoop_finalize_hadoop_opts () { testvar=opts; }
  hadoop_finalize
  [ "${testvar}" = "opts" ];
}

# The cygwin tests each record which variable name hadoop_finalize handed
# to hadoop_translate_cygwin_path.  ("$1" quoted so an empty argument
# cannot make '[' misparse.)
@test "hadoop_finalize (cygwin prefix)" {
  finalizer_mocks
  hadoop_translate_cygwin_path () {
    if [ "$1" = HADOOP_PREFIX ]; then
      testvar=prefix;
    fi
  }
  hadoop_finalize
  [ "${testvar}" = "prefix" ];
}

@test "hadoop_finalize (cygwin conf dir)" {
  finalizer_mocks
  hadoop_translate_cygwin_path () {
    if [ "$1" = HADOOP_CONF_DIR ]; then
      testvar=confdir;
    fi
  }
  hadoop_finalize
  [ "${testvar}" = "confdir" ];
}

@test "hadoop_finalize (cygwin common)" {
  finalizer_mocks
  hadoop_translate_cygwin_path () {
    if [ "$1" = HADOOP_COMMON_HOME ]; then
      testvar=common;
    fi
  }
  hadoop_finalize
  [ "${testvar}" = "common" ];
}

@test "hadoop_finalize (cygwin hdfs)" {
  finalizer_mocks
  hadoop_translate_cygwin_path () {
    if [ "$1" = HADOOP_HDFS_HOME ]; then
      testvar=hdfs;
    fi
  }
  hadoop_finalize
  [ "${testvar}" = "hdfs" ];
}

@test "hadoop_finalize (cygwin yarn)" {
  finalizer_mocks
  hadoop_translate_cygwin_path () {
    if [ "$1" = HADOOP_YARN_HOME ]; then
      testvar=yarn;
    fi
  }
  hadoop_finalize
  [ "${testvar}" = "yarn" ];
}

@test "hadoop_finalize (cygwin mapred)" {
  finalizer_mocks
  hadoop_translate_cygwin_path () {
    if [ "$1" = HADOOP_MAPRED_HOME ]; then
      testvar=mapred;
    fi
  }
  hadoop_finalize
  [ "${testvar}" = "mapred" ];
}

View File

@ -0,0 +1,56 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# hadoop_finalize_catalina_opts should inject every ${HADOOP_CATALINA_PREFIX}
# property into CATALINA_OPTS.  The '"${VAR#*pat}" != "${VAR}"' idiom asserts
# that 'pat' occurs somewhere in VAR.
@test "hadoop_finalize_catalina_opts (raw)" {
  local j
  HADOOP_IS_CYGWIN=false
  HADOOP_CATALINA_PREFIX=test
  CATALINA_OPTS=""
  hadoop_finalize_catalina_opts
  for j in test.home.dir \
  test.config.dir \
  test.log.dir \
  test.admin.port \
  test.http.port \
  test.max.threads \
  test.ssl.keystore.file; do
    [ "${CATALINA_OPTS#*${j}}" != "${CATALINA_OPTS}" ]
  done
}

@test "hadoop_finalize_catalina_opts (cygwin)" {
  local j
  # NOTE(review): disabled until catalina path handling works under cygwin.
  skip "catalina commands not supported under cygwin yet"
  HADOOP_IS_CYGWIN=true
  HADOOP_CATALINA_PREFIX=test
  CATALINA_OPTS=""
  # Mock: pretend every translated path becomes "foobarbaz", then check the
  # path-valued properties picked up the translated value.
  catalina_translate_cygwin_path () {
    eval ${1}="foobarbaz"
  }
  hadoop_finalize_catalina_opts
  for j in test.home.dir \
  test.config.dir \
  test.log.dir \
  test.ssl.keystore.file; do
    [ "${CATALINA_OPTS#*${j}=foobarbaz}" != "${CATALINA_OPTS}" ]
  done
}

View File

@ -0,0 +1,64 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# An empty CLASSPATH ends up as just the conf dir.
@test "hadoop_finalize_classpath (only conf dir)" {
  CLASSPATH=""
  HADOOP_CONF_DIR="${TMP}"
  hadoop_translate_cygwin_path () { true; }
  hadoop_add_to_classpath_userpath () { true; }
  hadoop_finalize_classpath
  [ "${CLASSPATH}" = "${TMP}" ]
}

# The conf dir is prepended, not appended, to an existing CLASSPATH.
@test "hadoop_finalize_classpath (before conf dir)" {
  CLASSPATH="1"
  HADOOP_CONF_DIR="${TMP}"
  hadoop_translate_cygwin_path () { true; }
  hadoop_add_to_classpath_userpath () { true; }
  hadoop_finalize_classpath
  [ "${CLASSPATH}" = "${TMP}:1" ]
}

# hadoop_add_to_classpath_userpath must be invoked (mock records the call).
@test "hadoop_finalize_classpath (adds user)" {
  CLASSPATH=""
  HADOOP_CONF_DIR="${TMP}"
  hadoop_translate_cygwin_path () { testvar=true; }
  hadoop_add_to_classpath_userpath () { testvar=true; }
  hadoop_finalize_classpath
  [ "${testvar}" = "true" ]
}

# Under cygwin, CLASSPATH itself must be handed to the path translator; the
# mock's '[ $1 = CLASSPATH ]' doubles as the assertion.
@test "hadoop_finalize_classpath (calls cygwin)" {
  CLASSPATH=""
  HADOOP_CONF_DIR="${TMP}"
  HADOOP_IS_CYGWIN=true
  hadoop_translate_cygwin_path () { [ $1 = CLASSPATH ]; }
  hadoop_add_to_classpath_userpath () { true; }
  hadoop_finalize_classpath
}

View File

@ -0,0 +1,87 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# Clear every knob hadoop_finalize_hadoop_heap reads or writes, so each
# test starts from a clean slate.
resetops () {
  unset HADOOP_HEAPSIZE_MAX
  unset HADOOP_HEAPSIZE
  unset HADOOP_HEAPSIZE_MIN
  unset HADOOP_OPTS
}

# No heap settings at all => no -Xmx/-Xms flags generated.
@test "hadoop_finalize_hadoop_heap (negative)" {
  resetops
  hadoop_finalize_hadoop_heap
  [ -z "${HADOOP_OPTS}" ]
}

# A bare number gets an implicit "m" (megabytes) suffix.
@test "hadoop_finalize_hadoop_heap (no unit max)" {
  resetops
  HADOOP_HEAPSIZE_MAX=1000
  hadoop_finalize_hadoop_heap
  echo ">${HADOOP_OPTS}<"
  [ "${HADOOP_OPTS}" = "-Xmx1000m" ]
}

# Deprecated HADOOP_HEAPSIZE behaves like HADOOP_HEAPSIZE_MAX.
@test "hadoop_finalize_hadoop_heap (no unit old)" {
  resetops
  HADOOP_HEAPSIZE=1000
  hadoop_finalize_hadoop_heap
  echo ">${HADOOP_OPTS}<"
  [ "${HADOOP_OPTS}" = "-Xmx1000m" ]
}

# An explicit unit suffix is passed through untouched.
@test "hadoop_finalize_hadoop_heap (unit max)" {
  resetops
  HADOOP_HEAPSIZE_MAX=10g
  hadoop_finalize_hadoop_heap
  echo ">${HADOOP_OPTS}<"
  [ "${HADOOP_OPTS}" = "-Xmx10g" ]
}

@test "hadoop_finalize_hadoop_heap (unit old)" {
  resetops
  HADOOP_HEAPSIZE=10g
  hadoop_finalize_hadoop_heap
  echo ">${HADOOP_OPTS}<"
  [ "${HADOOP_OPTS}" = "-Xmx10g" ]
}

# HADOOP_HEAPSIZE_MIN drives -Xms with the same unit rules.
@test "hadoop_finalize_hadoop_heap (no unit min)" {
  resetops
  HADOOP_HEAPSIZE_MIN=1000
  hadoop_finalize_hadoop_heap
  echo ">${HADOOP_OPTS}<"
  [ "${HADOOP_OPTS}" = "-Xms1000m" ]
}

@test "hadoop_finalize_hadoop_heap (unit min)" {
  resetops
  HADOOP_HEAPSIZE_MIN=10g
  hadoop_finalize_hadoop_heap
  echo ">${HADOOP_OPTS}<"
  [ "${HADOOP_OPTS}" = "-Xms10g" ]
}

# A pre-existing -Xmx in HADOOP_OPTS wins; calling twice (deliberate) must
# not append a second flag.
@test "hadoop_finalize_hadoop_heap (dedupe)" {
  resetops
  HADOOP_HEAPSIZE_MAX=1000
  HADOOP_OPTS="-Xmx5g"
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_heap
  echo ">${HADOOP_OPTS}<"
  [ "${HADOOP_OPTS}" = "-Xmx5g" ]
}

View File

@ -0,0 +1,52 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# All of the standard hadoop.* system properties must land in HADOOP_OPTS.
# The '"${VAR#*pat}" != "${VAR}"' idiom asserts that 'pat' occurs in VAR.
@test "hadoop_finalize_hadoop_opts (raw)" {
  local j
  HADOOP_IS_CYGWIN=false
  HADOOP_OPTS=""
  hadoop_finalize_hadoop_opts
  for j in hadoop.log.dir \
  hadoop.log.file \
  hadoop.home.dir \
  hadoop.root.logger \
  hadoop.policy.file \
  hadoop.security.logger \
  hadoop.id.str; do
    [ "${HADOOP_OPTS#*${j}}" != "${HADOOP_OPTS}" ]
  done
}

# Under cygwin the path-valued properties must carry translated values;
# the mock rewrites every translated variable to "foobarbaz".
@test "hadoop_finalize_hadoop_opts (cygwin)" {
  local j
  HADOOP_IS_CYGWIN=true
  HADOOP_OPTS=""
  hadoop_translate_cygwin_path () {
    eval ${1}="foobarbaz"
  }
  hadoop_finalize_hadoop_opts
  for j in hadoop.log.dir \
  hadoop.home.dir; do
    echo "${j} from >${HADOOP_OPTS}<"
    [ "${HADOOP_OPTS#*${j}=foobarbaz}" != "${HADOOP_OPTS}" ]
  done
}

View File

@ -0,0 +1,30 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# No JAVA_LIBRARY_PATH => no -Djava.library.path added to HADOOP_OPTS.
@test "hadoop_finalize_libpaths (negative)" {
  unset JAVA_LIBRARY_PATH
  unset HADOOP_OPTS
  hadoop_finalize_libpaths
  [ -z "${HADOOP_OPTS}" ]
}

# A set JAVA_LIBRARY_PATH is forwarded verbatim as -Djava.library.path.
@test "hadoop_finalize_libpaths (positive)" {
  JAVA_LIBRARY_PATH=test
  unset HADOOP_OPTS
  hadoop_finalize_libpaths
  [ "${HADOOP_OPTS}" = "-Djava.library.path=test" ]
}

View File

@ -0,0 +1,47 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# hadoop_java_setup must fail (exit 1) when JAVA_HOME is unset...
@test "hadoop_java_setup (negative not set)" {
  unset JAVA_HOME
  run hadoop_java_setup
  [ "${status}" -eq 1 ]
}

# ...or points at something that is not a directory...
@test "hadoop_java_setup (negative not a dir)" {
  # quoted for consistency with every other ${TMP} use in this file
  # (and so the test survives a TMP containing spaces)
  touch "${TMP}/foo"
  JAVA_HOME="${TMP}/foo"
  run hadoop_java_setup
  [ "${status}" -eq 1 ]
}

# ...or contains a bin/java that is not executable.
@test "hadoop_java_setup (negative not exec)" {
  mkdir -p "${TMP}/bin"
  touch "${TMP}/bin/java"
  JAVA_HOME="${TMP}"
  chmod a-x "${TMP}/bin/java"
  run hadoop_java_setup
  [ "${status}" -eq 1 ]
}

# A directory with an executable bin/java passes.
@test "hadoop_java_setup (positive)" {
  mkdir -p "${TMP}/bin"
  touch "${TMP}/bin/java"
  JAVA_HOME="${TMP}"
  chmod a+x "${TMP}/bin/java"
  run hadoop_java_setup
  [ "${status}" -eq 0 ]
}

View File

@ -0,0 +1,34 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# Any HADOOP_OS_TYPE beginning with CYGWIN must flip HADOOP_IS_CYGWIN on.
@test "hadoop_os_tricks (cygwin sets cygwin)" {
  HADOOP_OS_TYPE=CYGWIN-IS-GNU-USER-LAND
  hadoop_os_tricks
  [ "${HADOOP_IS_CYGWIN}" = "true" ]
}

# On Linux, a glibc malloc tuning default is expected.
@test "hadoop_os_tricks (linux sets arena max)" {
  HADOOP_OS_TYPE=Linux
  hadoop_os_tricks
  [ -n "${MALLOC_ARENA_MAX}" ]
}

# On Darwin, JAVA_HOME should get populated.
# NOTE(review): presumably via /usr/libexec/java_home — this test therefore
# only asserts non-emptiness, not the value.
@test "hadoop_os_tricks (osx sets java_home)" {
  HADOOP_OS_TYPE=Darwin
  hadoop_os_tricks
  [ -n "${JAVA_HOME}" ]
}

View File

@ -0,0 +1,52 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# Rotating moves the live log aside as .1.
@test "hadoop_rotate_log (defaults)" {
  touch "${TMP}/log"
  hadoop_rotate_log "${TMP}/log"
  [ -f "${TMP}/log.1" ]
  [ ! -f "${TMP}/log" ]
}

# An explicit archive count of 1 behaves the same for a single rotation.
@test "hadoop_rotate_log (one archive log)" {
  touch "${TMP}/log"
  hadoop_rotate_log "${TMP}/log" 1
  [ -f "${TMP}/log.1" ]
  [ ! -f "${TMP}/log" ]
}

# Rotating five times keeps log.1 .. log.5 (the implied default depth).
@test "hadoop_rotate_log (default five archive logs)" {
  local i
  for i in {1..5}; do
    echo "Testing ${i}"
    touch "${TMP}/log"
    hadoop_rotate_log "${TMP}/log"
    ls "${TMP}"
    [ -f "${TMP}/log.${i}" ]
  done
}

# A depth of 10 keeps ten generations.
@test "hadoop_rotate_log (ten archive logs)" {
  local i
  for i in {1..10}; do
    echo "Testing ${i}"
    touch "${TMP}/log"
    hadoop_rotate_log "${TMP}/log" 10
    ls "${TMP}"
    [ -f "${TMP}/log.${i}" ]
  done
}

View File

@ -0,0 +1,91 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# Create libexec/shellprofile.d and conf/shellprofile.d trees for
# hadoop_import_shellprofiles to scan.
shellprofilesetup () {
  HADOOP_LIBEXEC_DIR="${TMP}/libexec"
  HADOOP_CONF_DIR="${TMP}/conf"
  mkdir -p "${HADOOP_LIBEXEC_DIR}/shellprofile.d" "${HADOOP_CONF_DIR}/shellprofile.d"
}

# Hook implementations for a profile named "test": after
# hadoop_add_profile test, each hadoop_shellprofiles_* phase should
# dispatch to the matching _test_hadoop_* function below.
_test_hadoop_init () {
  unittest=init
}

_test_hadoop_classpath () {
  unittest=classpath
}

_test_hadoop_nativelib () {
  unittest=nativelib
}

_test_hadoop_finalize () {
  unittest=finalize
}

# Missing HADOOP_LIBEXEC_DIR should produce an error message.
@test "hadoop_import_shellprofiles (negative)" {
  shellprofilesetup
  unset HADOOP_LIBEXEC_DIR
  run hadoop_import_shellprofiles
  [ -n "${output}" ]
}

# *.sh profiles under libexec are sourced.
@test "hadoop_import_shellprofiles (libexec sh import)" {
  shellprofilesetup
  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/shellprofile.d/test.sh"
  hadoop_import_shellprofiles
  [ "${unittest}" = libexec ]
}

# A same-named profile in the conf dir overrides the libexec one.
@test "hadoop_import_shellprofiles (libexec conf sh import+override)" {
  shellprofilesetup
  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/shellprofile.d/test.sh"
  echo "unittest=conf" > "${HADOOP_CONF_DIR}/shellprofile.d/test.sh"
  hadoop_import_shellprofiles
  [ "${unittest}" = conf ]
}

# Non-.sh files (e.g. Windows .cmd) must be ignored.
@test "hadoop_import_shellprofiles (libexec no cmd import)" {
  shellprofilesetup
  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/shellprofile.d/test.cmd"
  hadoop_import_shellprofiles
  [ -z "${unittest}" ]
}

@test "hadoop_add_profile+hadoop_shellprofiles_init" {
  hadoop_add_profile test
  hadoop_shellprofiles_init
  [ "${unittest}" = init ]
}

@test "hadoop_add_profile+hadoop_shellprofiles_classpath" {
  hadoop_add_profile test
  hadoop_shellprofiles_classpath
  [ "${unittest}" = classpath ]
}

@test "hadoop_add_profile+hadoop_shellprofiles_nativelib" {
  hadoop_add_profile test
  hadoop_shellprofiles_nativelib
  [ "${unittest}" = nativelib ]
}

@test "hadoop_add_profile+hadoop_shellprofiles_finalize" {
  hadoop_add_profile test
  hadoop_shellprofiles_finalize
  [ "${unittest}" = finalize ]
}

View File

@ -0,0 +1,37 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# An absolute/relative path that exists is used as-is.
@test "hadoop_populate_slaves_file (specific file)" {
  touch "${TMP}/file"
  hadoop_populate_slaves_file "${TMP}/file"
  [ "${HADOOP_SLAVES}" = "${TMP}/file" ]
}

# A bare filename is resolved relative to HADOOP_CONF_DIR.
@test "hadoop_populate_slaves_file (specific conf dir file)" {
  HADOOP_CONF_DIR=${TMP}/1
  mkdir -p "${HADOOP_CONF_DIR}"
  touch "${HADOOP_CONF_DIR}/file"
  hadoop_populate_slaves_file "file"
  echo "${HADOOP_SLAVES}"
  [ "${HADOOP_SLAVES}" = "${HADOOP_CONF_DIR}/file" ]
}

# A name that exists nowhere is a fatal (exit 1) error.
@test "hadoop_populate_slaves_file (no file)" {
  HADOOP_CONF_DIR=${TMP}
  run hadoop_populate_slaves_file "foo"
  [ "${status}" -eq 1 ]
}

View File

@ -0,0 +1,51 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

@test "hadoop_actual_ssh" {
  skip "Not implemented"
  hadoop_actual_ssh
}

@test "hadoop_connect_to_hosts" {
  skip "Not implemented"
  hadoop_connect_to_hosts
}

@test "hadoop_connect_to_hosts_without_pdsh" {
  skip "Not implemented"
  hadoop_connect_to_hosts_without_pdsh
}

# NOTE: ${output} must be quoted throughout.  When it holds more than one
# word, an unquoted expansion makes '[' see too many arguments ("[: too
# many arguments"), so the multi-command assertions could never pass.
@test "hadoop_common_slave_mode_execute (--slaves 1)" {
  run hadoop_common_slave_mode_execute --slaves command
  [ "${output}" = command ]
}

@test "hadoop_common_slave_mode_execute (--slaves 2)" {
  run hadoop_common_slave_mode_execute --slaves command1 command2
  [ "${output}" = "command1 command2" ]
}

@test "hadoop_common_slave_mode_execute (--hosts)" {
  run hadoop_common_slave_mode_execute --hosts filename command
  [ "${output}" = command ]
}

@test "hadoop_common_slave_mode_execute (--hostnames 2)" {
  run hadoop_common_slave_mode_execute --hostnames "host1,host2" command1 command2
  [ "${output}" = "command1 command2" ]
}

View File

@ -0,0 +1,48 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# Outside cygwin, the named variable must pass through untouched.
@test "hadoop_translate_cygwin_path (negative)" {
  HADOOP_IS_CYGWIN=false
  testvar="/this/path/is/cool"
  hadoop_translate_cygwin_path testvar
  [ "${testvar}" = "/this/path/is/cool" ]
}

# Under cygwin, the value is rewritten via cygpath (mocked here).
@test "hadoop_translate_cygwin_path (positive)" {
  HADOOP_IS_CYGWIN=true
  testvar="/this/path/is/cool"
  cygpath () {
    echo "test"
  }
  hadoop_translate_cygwin_path testvar
  [ "${testvar}" = "test" ]
}

# Second argument "true" selects path-list translation; same mock result.
@test "hadoop_translate_cygwin_path (path positive)" {
  HADOOP_IS_CYGWIN=true
  testvar="/this/path/is/cool"
  cygpath () {
    echo "test"
  }
  hadoop_translate_cygwin_path testvar true
  [ "${testvar}" = "test" ]
}

View File

@ -0,0 +1,26 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper

# A name with no package qualifier is rejected with status 1.
@test "hadoop_validate_classname (negative)" {
  run hadoop_validate_classname fakeclass
  [ ${status} -eq 1 ]
}

# A dotted, fully-qualified class name is accepted.
@test "hadoop_validate_classname (positive)" {
  run hadoop_validate_classname org.apache.hadoop.io.Text
  [ ${status} -eq 0 ]
}

View File

@ -0,0 +1,43 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Run every bats suite in this directory, teeing TAP output into the
# maven target tree so CI can collect it.
targetdir=../../../target
mkdir -p "${targetdir}/surefire-reports" "${targetdir}/tap"

# 'command -v' is the portable way to locate a tool (unlike 'which').
# The stderr redirect belongs *inside* the command substitution; the
# original's '$(which bats) 2>/dev/null' applied it to the assignment,
# leaving the lookup's error message unsuppressed.
batsexe=$(command -v bats 2>/dev/null)

if [[ -z ${batsexe} ]]; then
  echo "not ok - no bats executable found" > "${targetdir}/tap/shelltest.tap"
  echo ""
  echo ""
  echo "ERROR: bats not installed. Skipping bash tests."
  echo "ERROR: Please install bats as soon as possible."
  echo ""
  echo ""
  # missing bats is a skip, not a build failure
  exit 0
fi

# Accumulate the exit status of every suite; initialized explicitly
# instead of relying on arithmetic treating an unset variable as 0.
exitcode=0
for j in *.bats; do
  echo Running bats -t "${j}"
  bats -t "${j}" 2>&1 | tee "${targetdir}/tap/${j}.tap"
  # status of bats itself, not of tee
  result=${PIPESTATUS[0]}
  ((exitcode=exitcode+result))
done

if [[ ${exitcode} -gt 0 ]]; then
  exit 1
fi
exit 0