<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project</artifactId>
    <version>3.0.0-SNAPSHOT</version>
    <relativePath>../hadoop-project</relativePath>
  </parent>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-dist</artifactId>
  <version>3.0.0-SNAPSHOT</version>
  <description>Apache Hadoop Distribution</description>
  <name>Apache Hadoop Distribution</name>
  <packaging>jar</packaging>
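  <!-- This module produces no code of its own: the antrun script in the
       "dist" profile below stitches the per-module build outputs into a
       single distribution directory (and, optionally, a tarball). -->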
  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-app</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-api</artifactId>
      <scope>provided</scope>
    </dependency>
  </dependencies>
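  <!-- The provided scope keeps these artifacts out of this module's own jar
       while still forcing Maven to build them earlier in the reactor, so
       their dist layouts exist before the stitching script runs. -->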
  <build>
    <plugins>
      <plugin>
        <artifactId>maven-deploy-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.rat</groupId>
        <artifactId>apache-rat-plugin</artifactId>
      </plugin>
    </plugins>
  </build>
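  <!-- Typical invocation (see BUILDING.txt): from the project root,
       "mvn package -Pdist -DskipTests -Dtar" builds the distribution
       layout and, with -Dtar, the release tarball as well. -->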
  <profiles>
    <profile>
      <id>dist</id>
      <activation>
        <activeByDefault>false</activeByDefault>
        <property>
          <name>tar</name>
        </property>
      </activation>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
              <execution>
                <id>dist</id>
                <phase>prepare-package</phase>
                <goals>
                  <goal>run</goal>
                </goals>
                <configuration>
                  <target>
                    <echo file="${project.build.directory}/dist-layout-stitching.sh">
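# dist-layout-stitching.sh (generated at build time by the echo task above):
# assembles the Hadoop distribution layout from the per-module build output.
# run: echo the command being executed, execute it, and abort the build on
# any failure.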
run() {
  echo "\$ ${@}"
  "${@}"
  res=$?
  if [ $res != 0 ]; then
    echo
    echo "Failed!"
    echo
    exit $res
  fi
}
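# findFileInDir: print how many files named "$1" (case-insensitive) exist
# under directory "$2" (default: ./share).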
findFileInDir(){
  local file="$1";
  local dir="${2:-./share}";
  local count=$(find "$dir" -iname "$file" | wc -l)
  echo "$count";
}
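# copyIfNotExists: recursively copy "$1" to "$2". Jars already present
# anywhere under ./share are skipped to avoid duplicating libraries; the
# doc and webapps subtrees are always copied verbatim.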
copyIfNotExists(){
  local src="$1"
  local srcName=$(basename "$src")
  local dest="$2";
  if [ -f "$src" ]; then
    if [[ "$srcName" != *.jar ]] || [ $(findFileInDir "$srcName") -eq "0" ]; then
      local destDir=$(dirname "$dest")
      mkdir -p "$destDir"
      cp "$src" "$dest"
    fi
  else
    for childPath in "$src"/* ;
    do
      child=$(basename "$childPath");
      if [ "$child" == "doc" ] || [ "$child" == "webapps" ]; then
        mkdir -p "$dest"/"$child"
        cp -r "$src"/"$child"/* "$dest"/"$child"
        continue;
      fi
      copyIfNotExists "$src"/"$child" "$dest"/"$child"
    done
  fi
}
# copy: copy all contents of "$1" into "$2" as-is, except for libs under
# 'share': a lib is copied only if it does not already exist in the
# destination's 'share' directory.
copy(){
  local src="$1";
  local dest="$2";
  if [ -d "$src" ]; then
    for childPath in "$src"/* ;
    do
      child=$(basename "$childPath");
      if [ "$child" == "share" ]; then
        copyIfNotExists "$src"/"$child" "$dest"/"$child"
      else
        if [ -d "$src"/"$child" ]; then
          mkdir -p "$dest"/"$child"
          cp -r "$src"/"$child"/* "$dest"/"$child"
        else
          cp -r "$src"/"$child" "$dest"/"$child"
        fi
      fi
    done
  fi
}
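# --- main: assemble the distribution layout ---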
# Maven interpolates ${project.version} and ${project.build.directory} below
# before this script is written, so the unquoted expansions are fixed strings.
# shellcheck disable=SC2086
ROOT=$(cd "${project.build.directory}"/../..;pwd)
echo
echo "Current directory $(pwd)"
echo
run rm -rf hadoop-${project.version}
run mkdir hadoop-${project.version}
run cd hadoop-${project.version}
run cp "$ROOT"/LICENSE.txt .
run cp "$ROOT"/NOTICE.txt .
run cp "$ROOT"/README.txt .
# Copy hadoop-common first so that 'share' always holds the complete set of
# common dependencies.
# The remaining projects then copy only libraries that are not already
# present in the 'share' directory.
run copy "$ROOT"/hadoop-common-project/hadoop-common/target/hadoop-common-${project.version} .
run copy "$ROOT"/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-${project.version} .
run copy "$ROOT"/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version} .
run copy "$ROOT"/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${project.version} .
run copy "$ROOT"/hadoop-yarn-project/target/hadoop-yarn-project-${project.version} .
run copy "$ROOT"/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version} .
run copy "$ROOT"/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version} .
# Copy httpfs and kms as-is.
run cp -r "$ROOT"/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
run cp -r "$ROOT"/hadoop-common-project/hadoop-kms/target/hadoop-kms-${project.version}/* .
echo
echo "Hadoop dist layout available at: ${project.build.directory}/hadoop-${project.version}"
echo
                    </echo>
                    <exec executable="bash" dir="${project.build.directory}" failonerror="true">
                      <!-- bash rather than sh: the stitching script uses [[ ]] tests -->
                      <arg line="./dist-layout-stitching.sh"/>
                    </exec>
                  </target>
                </configuration>
              </execution>
              <execution>
                <id>tar</id>
                <phase>package</phase>
                <goals>
                  <goal>run</goal>
                </goals>
                <configuration>
                  <!-- run only when the tar property (-Dtar) is set -->
                  <target if="tar">
                    <echo file="${project.build.directory}/dist-tar-stitching.sh">
# run() is repeated here because dist-tar-stitching.sh is a separate,
# self-contained generated script.
run() {
  echo "\$ ${@}"
  "${@}"
  res=$?
  if [ $res != 0 ]; then
    echo
    echo "Failed!"
    echo
    exit $res
  fi
}
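# Package the prepared layout into the release tarball.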
run tar cf hadoop-${project.version}.tar hadoop-${project.version}
run gzip -f hadoop-${project.version}.tar
echo
echo "Hadoop dist tar available at: ${project.build.directory}/hadoop-${project.version}.tar.gz"
echo
                    </echo>
                    <exec executable="bash" dir="${project.build.directory}" failonerror="true">
                      <arg line="./dist-tar-stitching.sh"/>
                    </exec>
                  </target>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>
</project>