<?xml version="1.0"?>
<!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-->

<project name="Hadoop" default="compile"
   xmlns:artifact="urn:maven-artifact-ant"
   xmlns:ivy="antlib:org.apache.ivy.ant">

  <import file="build-utils.xml" />

  <!-- Load all the default properties, and any the user wants    -->
  <!-- to contribute (without having to type -D or edit this file -->
  <!-- ${user.home} first: Ant properties are immutable, so a user-level
       build.properties takes precedence over the project-level one. -->
  <property file="${user.home}/build.properties" />
  <property file="${basedir}/build.properties" />

  <!-- Module identity and artifact naming. -->
  <property name="module" value="mapreduce"/>
  <property name="Name" value="Hadoop-Mapred"/>
  <property name="name" value="hadoop-${module}"/>
  <!-- Need to change aop.xml project.version prop. synchronously -->
  <property name="_version" value="0.23.0"/>
  <property name="version" value="${_version}-SNAPSHOT"/>
  <property name="final.name" value="${name}-${version}"/>
  <property name="test.final.name" value="${name}-test-${version}"/>
  <property name="examples.final.name" value="${name}-examples-${version}"/>
  <property name="tools.final.name" value="${name}-tools-${version}"/>
  <property name="year" value="2009"/>
  <property name="package.release" value="1"/>

  <!-- Source-tree layout. -->
  <property name="src.dir" value="${basedir}/src"/>
  <property name="mapred.src.dir" value="${src.dir}/java"/>
  <property name="examples.dir" value="${basedir}/src/examples"/>
  <property name="lib.dir" value="${basedir}/lib"/>
  <property name="conf.dir" value="${basedir}/conf"/>
  <property name="contrib.dir" value="${basedir}/src/contrib"/>
  <property name="docs.src" value="${basedir}/src/docs"/>
  <property name="changes.src" value="${docs.src}/changes"/>
  <property name="c++.src" value="${basedir}/src/c++"/>
  <property name="c++.utils.src" value="${c++.src}/utils"/>
  <property name="c++.pipes.src" value="${c++.src}/pipes"/>
  <property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/>
  <property name="librecordio.src" value="${c++.src}/librecordio"/>
  <property name="tools.src" value="${basedir}/src/tools"/>

  <!-- Empty by default; overridable for librecordio's Xerces dependency. -->
  <property name="xercescroot" value=""/>

  <!-- Build-output layout. -->
  <property name="build.dir" value="${basedir}/build"/>
  <property name="build.classes" value="${build.dir}/classes"/>
  <property name="build.src" value="${build.dir}/src"/>
  <property name="build.tools" value="${build.dir}/tools"/>
  <property name="build.webapps" value="${build.dir}/webapps"/>
  <property name="build.examples" value="${build.dir}/examples"/>
  <property name="build.librecordio" value="${build.dir}/librecordio"/>

  <!-- convert spaces to _ so that mac os doesn't break things -->
  <exec executable="tr" inputstring="${os.name}"
    outputproperty="nonspace.os">
     <arg value="[:space:]"/>
     <arg value="_"/>
  </exec>
  <!-- Platform triple used to segregate per-platform native output,
       e.g. Linux-amd64-64. -->
  <property name="build.platform"
            value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
  <property name="jvm.arch"
            value="${sun.arch.data.model}"/>
  <property name="build.native" value="${build.dir}/native/${build.platform}"/>
  <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/>
  <property name="build.c++.utils" value="${build.c++}/utils"/>
  <property name="build.c++.pipes" value="${build.c++}/pipes"/>
  <property name="build.c++.examples.pipes"
            value="${build.c++}/examples/pipes"/>
  <property name="build.docs" value="${build.dir}/docs"/>
  <property name="build.javadoc" value="${build.docs}/api"/>
  <property name="build.javadoc.timestamp" value="${build.javadoc}/index.html" />
  <property name="build.javadoc.dev" value="${build.docs}/dev-api"/>
  <property name="build.encoding" value="ISO-8859-1"/>
  <property name="install.c++" value="${build.dir}/c++/${build.platform}"/>
  <property name="install.c++.examples"
            value="${build.dir}/c++-examples/${build.platform}"/>

  <!-- Test-tree layout and JUnit runner settings. -->
  <property environment="env"/>
  <property name="test.src.dir" value="${basedir}/src/test"/>
  <property name="test.lib.dir" value="${basedir}/src/test/lib"/>
  <property name="test.build.dir" value="${build.dir}/test"/>
  <property name="test.generated.dir" value="${test.build.dir}/src"/>
  <property name="test.build.data" value="${test.build.dir}/data"/>
  <property name="test.cache.data" value="${test.build.dir}/cache"/>
  <property name="test.concat.data" value="${test.build.dir}/concat"/>
  <property name="test.debug.data" value="${test.build.dir}/debug"/>
  <property name="test.log.dir" value="${test.build.dir}/logs"/>
  <property name="test.build.classes" value="${test.build.dir}/classes"/>
  <property name="test.mapred.build.testjar" value="${test.build.dir}/mapred/testjar"/>
  <property name="test.mapred.build.testshell" value="${test.build.dir}/mapred/testshell"/>
  <property name="test.build.extraconf" value="${test.build.dir}/extraconf"/>
  <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
  <property name="test.build.javadoc.dev" value="${test.build.dir}/docs/dev-api"/>
  <property name="test.include" value="Test*"/>
  <property name="test.classpath.id" value="test.classpath"/>
  <property name="test.output" value="no"/>
  <!-- Per-test timeout in milliseconds (15 minutes). -->
  <property name="test.timeout" value="900000"/>
  <property name="test.junit.output.format" value="plain"/>
  <property name="test.junit.fork.mode" value="perTest" />
  <property name="test.junit.printsummary" value="yes" />
  <property name="test.junit.haltonfailure" value="no" />
  <property name="test.junit.maxmemory" value="512m" />
  <property name="test.junit.jvmargs" value="-ea" />
  <property name="test.tmp.dir" value="${env.TMP}" />
  <property name="test.temp.dir" value="${env.TEMP}" />
  <property name="test.conf.dir" value="${build.dir}/test/conf" />

  <property name="test.tools.input.dir" value="${basedir}/src/test/tools/data" />

  <!-- Test-list files: which tests belong to the commit/smoke/all suites. -->
  <property name="test.mapred.build.classes" value="${test.build.dir}/mapred/classes"/>
  <property name="test.mapred.commit.tests.file" value="${test.src.dir}/commit-tests" />
  <property name="test.mapred.smoke.tests.file" value="${test.src.dir}/smoke-tests" />
  <property name="test.mapred.all.tests.file" value="${test.src.dir}/all-tests" />
  <property name="test.exclude.file" value="${test.src.dir}/empty-file" />

  <property name="librecordio.test.dir" value="${test.build.dir}/librecordio"/>
  <property name="web.src.dir" value="${basedir}/src/web"/>
  <property name="src.webapps" value="${basedir}/src/webapps"/>

  <!-- Javadoc settings. -->
  <property name="javadoc.link.java"
            value="http://java.sun.com/javase/6/docs/api/"/>
  <property name="javadoc.packages" value="org.apache.hadoop.*"/>
  <property name="javadoc.maxmemory" value="512m" />

  <property name="dist.dir" value="${build.dir}/${final.name}"/>

  <!-- javac settings shared by every compile target. -->
  <property name="javac.debug" value="on"/>
  <property name="javac.optimize" value="on"/>
  <property name="javac.deprecation" value="off"/>
  <property name="javac.version" value="1.6"/>
  <property name="javac.args" value=""/>
  <property name="javac.args.warnings" value="-Xlint:unchecked"/>

  <!-- Clover code-coverage output locations. -->
  <property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
  <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>

  <property name="rat.reporting.classname" value="rat.Report"/>

  <!-- JDiff API-comparison report settings. -->
  <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
  <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
  <property name="jdiff.stability" value="-unstable"/>
  <property name="jdiff.compatibility" value=""/>
  <property name="jdiff.stable" value="0.21.0"/>
  <property name="jdiff.stable.javadoc"
            value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>

  <!-- External command names; overridable via -D for non-standard PATHs. -->
  <property name="scratch.dir" value="${user.home}/tmp"/>
  <property name="svn.cmd" value="svn"/>
  <property name="grep.cmd" value="grep"/>
  <property name="patch.cmd" value="patch"/>
  <property name="make.cmd" value="make"/>

  <property name="findbugs.heap.size" value="512M"/>
  <!-- task-controller properties set here -->
  <!-- Source directory from where configure is run and files are copied
  -->

  <property name="c++.task-controller.src"
            value="${basedir}/src/c++/task-controller" />
  <!-- directory where autoconf files + temporary files and src is
       stored for compilation -->
  <property name="build.c++.task-controller"
            value="${build.c++}/task-controller" />
  <!-- the default install dir is build directory override it using
       -Dtask-controller.install.dir=$HADOOP_HOME/bin -->
  <property name="task-controller.install.dir" value="${dist.dir}/bin" />
  <!-- end of task-controller properties -->

<!-- IVY properteis set here -->
|
|
<property name="ivy.dir" location="ivy" />
|
|
<loadproperties srcfile="${ivy.dir}/libraries.properties"/>
|
|
<property name="mvn.repo" value="http://repo2.maven.org/maven2"/>
|
|
<property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
|
|
<property name="asfrepo" value="https://repository.apache.org"/>
|
|
<property name="asfsnapshotrepo" value="${asfrepo}/content/repositories/snapshots"/>
|
|
<property name="asfstagingrepo"
|
|
value="${asfrepo}/service/local/staging/deploy/maven2"/>
|
|
<property name="ant_task.jar" location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/>
|
|
<property name="ant_task_repo_url" value="${mvn.repo}/org/apache/maven/maven-ant-tasks/${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
|
|
<property name="ivy_repo_url" value="${mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
|
|
<property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml" />
|
|
<property name="ivy.org" value="org.apache.hadoop"/>
|
|
<property name="build.dir" location="build" />
|
|
<property name="dist.dir" value="${build.dir}/${final.name}"/>
|
|
<property name="build.ivy.dir" location="${build.dir}/ivy" />
|
|
<property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
|
|
<property name="common.ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
|
|
<property name="mapred.ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}/mapred"/>
|
|
<property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
|
|
<property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven" />
|
|
<property name="build.ivy.maven.pom" location="${build.ivy.maven.dir}/hadoop-mapred-${version}.pom" />
|
|
<property name="build.ivy.maven.jar" location="${build.ivy.maven.dir}/hadoop-mapred-${version}.jar" />
|
|
<property name="hadoop-mapred.pom" location="${ivy.dir}/hadoop-mapred.xml"/>
|
|
<property name="hadoop-mapred-test.pom" location="${ivy.dir}/hadoop-mapred-test.xml"/>
|
|
<property name="hadoop-mapred-examples.pom" location="${ivy.dir}/hadoop-mapred-examples.xml"/>
|
|
<property name="hadoop-mapred-tools.pom" location="${ivy.dir}/hadoop-mapred-tools.xml"/>
|
|
|
|
<!--this is the naming policy for artifacts we want pulled down-->
|
|
<property name="ivy.artifact.retrieve.pattern" value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>
|
|
|
|
<!--this is how artifacts that get built are named-->
|
|
<property name="ivy.publish.pattern" value="hadoop-[revision]-core.[ext]"/>
|
|
<property name="hadoop-mapred.jar" location="${build.dir}/${final.name}.jar" />
|
|
<property name="hadoop-mapred-test.jar" location="${build.dir}/${test.final.name}.jar" />
|
|
<property name="hadoop-mapred-examples.jar" location="${build.dir}/${examples.final.name}.jar" />
|
|
<property name="hadoop-mapred-tools.jar" location="${build.dir}/${tools.final.name}.jar" />
|
|
<property name="hadoop-mapred-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
|
|
<property name="hadoop-mapred-sources.jar" location="${build.dir}/${final.name}-sources.jar" />
|
|
<property name="hadoop-mapred-test-sources.jar" location="${build.dir}/${test.final.name}-sources.jar" />
|
|
<property name="hadoop-mapred-examples-sources.jar" location="${build.dir}/${examples.final.name}-sources.jar" />
|
|
<property name="hadoop-mapred-tools-sources.jar" location="${build.dir}/${tools.final.name}-sources.jar" />
|
|
|
|
  <!-- jdiff.home property set -->
  <property name="jdiff.home" value="${build.ivy.lib.dir}/${ant.project.name}/jdiff"/>
  <property name="jdiff.jar" value="${jdiff.home}/jdiff-${jdiff.version}.jar"/>
  <property name="xerces.jar" value="${jdiff.home}/xerces-${xerces.version}.jar"/>

  <!-- Eclipse properties -->
  <property name="build.dir.eclipse" value="${build.dir}/eclipse"/>
  <property name="build.dir.eclipse-main-classes" value="${build.dir.eclipse}/classes-main"/>
  <property name="build.dir.eclipse-main-generated-classes" value="${build.dir.eclipse}/classes-main-generated"/>
  <property name="build.dir.eclipse-test-classes" value="${build.dir.eclipse}/classes-test"/>
  <property name="build.dir.eclipse-example-classes" value="${build.dir.eclipse}/classes-example"/>
  <property name="build.dir.eclipse-tools-classes" value="${build.dir.eclipse}/classes-tools"/>
  <property name="build.dir.eclipse-contrib-classes" value="${build.dir.eclipse}/classes-contrib"/>

  <!-- clover.present is set only if clover.jar actually exists on disk. -->
  <property name="clover.jar" location="${clover.home}/lib/clover.jar"/>
  <available property="clover.present" file="${clover.jar}" />

  <!-- check if clover reports should be generated -->
  <!-- true only when the user passed -Drun.clover AND clover.jar is present. -->
  <condition property="clover.enabled">
    <and>
      <isset property="run.clover"/>
      <isset property="clover.present"/>
    </and>
  </condition>

  <!-- set when -Drepo=staging; presumably selects the ASF staging repository
       for publishing — confirm against the deploy targets. -->
  <condition property="staging">
    <equals arg1="${repo}" arg2="staging"/>
  </condition>

<!-- packaging properties -->
|
|
<property name="package.prefix" value="/usr"/>
|
|
<property name="package.conf.dir" value="/etc/hadoop"/>
|
|
<property name="package.log.dir" value="/var/log/hadoop/mapred"/>
|
|
<property name="package.pid.dir" value="/var/run/hadoop"/>
|
|
<property name="package.var.dir" value="/var/lib/hadoop"/>
|
|
<property name="package.share.dir" value="/share/hadoop/${module}"/>
|
|
<!-- Use fixed path to build rpm for avoiding rpmbuild conflict with dash path names -->
|
|
<property name="package.buildroot" value="/tmp/hadoop_mapred_package_build_${user.name}"/>
|
|
<property name="package.build.dir" value="/tmp/hadoop_mapred_package_build_${user.name}/BUILD"/>
|
|
|
|
  <!-- the normal classpath -->
  <!-- Compiled classes + config dir + Ivy-resolved mapred dependencies. -->
  <path id="classpath">
    <pathelement location="${build.classes}"/>
    <pathelement location="${conf.dir}"/>
    <path refid="ivy-mapred.classpath"/>
  </path>

<path id="test.classpath">
|
|
<pathelement location="${test.build.extraconf}"/>
|
|
<pathelement location="${test.mapred.build.classes}" />
|
|
<pathelement location="${test.src.dir}"/>
|
|
<pathelement location="${build.dir}"/>
|
|
<pathelement location="${build.examples}"/>
|
|
<pathelement location="${build.tools}"/>
|
|
<pathelement path="${clover.jar}"/>
|
|
<pathelement location="${build.classes}"/>
|
|
<pathelement location="${test.conf.dir}"/>
|
|
<path refid="ivy-common.classpath"/>
|
|
<path refid="ivy-mapred.classpath"/>
|
|
<pathelement location="${test.mapred.build.classes}" />
|
|
<path refid="ivy-test.classpath"/>
|
|
</path>
|
|
|
|
  <!-- the cluster test classpath: uses conf.dir for configuration -->
  <path id="test.cluster.classpath">
    <path refid="classpath"/>
    <pathelement location="${test.build.classes}" />
    <pathelement location="${test.src.dir}"/>
    <pathelement location="${build.dir}"/>
  </path>


  <!-- ====================================================== -->
  <!-- Macro definitions                                      -->
  <!-- ====================================================== -->
  <!-- Worker macro: builds a gzip-compressed tar (GNU long-filename
       handling) at @{param.destfile} from the caller's nested filesets. -->
  <macrodef name="macro_tar" description="Worker Macro for tar">
    <attribute name="param.destfile"/>
    <element name="param.listofitems"/>
    <sequential>
      <tar compression="gzip" longfile="gnu"
           destfile="@{param.destfile}">
        <param.listofitems/>
      </tar>
    </sequential>
  </macrodef>

  <!-- ====================================================== -->
  <!-- Stuff needed by all targets                            -->
  <!-- ====================================================== -->
  <!-- Creates the build/test directory tree, copies static webapp files,
       unpacks the HDFS webapps from the Ivy-resolved hadoop-hdfs jar, and
       materializes every *.template config file as its derived twin. -->
  <target name="init" depends="ivy-retrieve-common,ivy-retrieve-mapred">
    <mkdir dir="${build.dir}"/>
    <mkdir dir="${build.classes}"/>
    <mkdir dir="${build.tools}"/>
    <mkdir dir="${build.src}"/>
    <mkdir dir="${build.webapps}/task/WEB-INF"/>
    <mkdir dir="${build.webapps}/job/WEB-INF"/>
    <mkdir dir="${build.examples}"/>
    <mkdir dir="${build.dir}/c++"/>

    <mkdir dir="${test.build.dir}"/>
    <mkdir dir="${test.build.classes}"/>
    <mkdir dir="${test.build.extraconf}"/>
    <!-- Set every *.template's mtime to the epoch — presumably so the copies
         below never clobber already-derived files (Ant <copy> skips targets
         newer than the source); TODO(review) confirm intent. -->
    <tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
    <touch millis="0" file="${touch.temp.file}">
      <fileset dir="${conf.dir}" includes="**/*.template"/>
      <fileset dir="${contrib.dir}" includes="**/*.template"/>
    </touch>
    <delete file="${touch.temp.file}"/>

    <!-- copy all of the jsp and static files -->
    <copy todir="${build.webapps}">
      <fileset dir="${src.webapps}">
        <exclude name="**/*.jsp" />
        <exclude name="**/*.jspx" />
      </fileset>
    </copy>

    <!-- Extract the HDFS webapp resources shipped inside the hdfs jar. -->
    <unzip src="${common.ivy.lib.dir}/hadoop-hdfs-${hadoop-hdfs.version}.jar"
        dest="${build.dir}">
      <patternset>
        <include name="webapps/hdfs/**"/>
        <include name="webapps/datanode/**"/>
        <include name="webapps/secondary/**"/>
      </patternset>
    </unzip>

    <!-- conf/foo.template -> conf/foo, and the same for the test conf dir
         and each contrib module. -->
    <copy todir="${conf.dir}" verbose="true">
      <fileset dir="${conf.dir}" includes="**/*.template"/>
      <mapper type="glob" from="*.template" to="*"/>
    </copy>

    <mkdir dir="${test.conf.dir}"/>
    <copy todir="${test.conf.dir}" verbose="true">
      <fileset dir="${conf.dir}" includes="**/*.template"/>
      <mapper type="glob" from="*.template" to="*"/>
    </copy>

    <copy todir="${contrib.dir}" verbose="true">
      <fileset dir="${contrib.dir}" includes="**/*.template"/>
      <mapper type="glob" from="*.template" to="*"/>
    </copy>

  </target>

  <!-- Runs the Avro ProtocolTask over every .avpr file in the core source
       tree, generating Java sources under ${build.src}. -->
  <target name="avro-generate" depends="init">
    <mkdir dir="${build.src}/org/apache/hadoop/mapreduce/jobhistory"/>
    <taskdef name="protocol" classname="org.apache.avro.specific.ProtocolTask">
      <classpath refid="classpath" />
    </taskdef>
    <protocol destdir="${build.src}">
      <fileset dir="${mapred.src.dir}">
        <include name="**/*.avpr" />
      </fileset>
    </protocol>
  </target>

  <!-- Compiles the MapReduce core: generates servlet sources from the task
       and job web-UI JSPs, compiles all core Java (including Avro- and
       JSP-generated code under ${build.src}), then copies resources. -->
  <target name="compile-mapred-classes" depends="init,avro-generate">
    <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
      <classpath refid="classpath"/>
    </taskdef>
    <jsp-compile
     uriroot="${src.webapps}/task"
     outputdir="${build.src}"
     package="org.apache.hadoop.mapred"
     webxml="${build.webapps}/task/WEB-INF/web.xml">
    </jsp-compile>

    <jsp-compile
     uriroot="${src.webapps}/job"
     outputdir="${build.src}"
     package="org.apache.hadoop.mapred"
     webxml="${build.webapps}/job/WEB-INF/web.xml">
    </jsp-compile>

    <!-- Compile Java files (excluding JSPs) checking warnings -->
    <javac
     encoding="${build.encoding}"
     srcdir="${mapred.src.dir};${build.src}"
     includes="org/apache/hadoop/**/*.java"
     destdir="${build.classes}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath refid="classpath"/>
    </javac>

    <!-- Non-Java resources that must travel with the compiled classes. -->
    <copy todir="${build.classes}">
      <fileset dir="${mapred.src.dir}" includes="**/*.properties"/>
      <fileset dir="${mapred.src.dir}" includes="**/META-INF/services/*"/>
      <fileset dir="${mapred.src.dir}" includes="mapred-default.xml"/>
      <fileset dir="${mapred.src.dir}" includes="mapred-queues-default.xml"/>
    </copy>
  </target>

  <!-- Compiles the tools source tree into ${build.tools} and copies its
       .properties resources alongside the classes. -->
  <target name="compile-tools" depends="init">
    <javac
     encoding="${build.encoding}"
     srcdir="${tools.src}"
     includes="org/apache/hadoop/**/*.java"
     destdir="${build.tools}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath refid="classpath"/>
    </javac>

    <copy todir="${build.tools}">
      <fileset
        dir="${tools.src}"
        includes="**/*.properties"
      />
    </copy>
  </target>

  <!-- Core build: MapReduce classes plus the C++ components (the clover
       and compile-c++ targets are defined elsewhere in this file). -->
  <target name="compile-core" depends="clover, compile-mapred-classes, compile-c++" description="Compile core only"/>

  <!-- Builds the contrib modules by delegating to src/contrib/build.xml,
       forwarding the version properties. -->
  <target name="compile-contrib" depends="compile-core,tools">
    <subant target="compile">
      <property name="version" value="${version}"/>
      <property name="hadoop-common.version" value="${hadoop-common.version}"/>
      <fileset file="${contrib.dir}/build.xml"/>
    </subant>
  </target>

  <!-- Project default target: core + contrib + tools. -->
  <target name="compile" depends="compile-core, compile-contrib, compile-tools" description="Compile core, contrib">
  </target>

  <!-- Compiles the Java examples against the core classpath plus the
       compiled tools (the C++ pipes examples come from the dependency). -->
  <target name="compile-examples"
          depends="compile-core,compile-tools,compile-c++-examples">
    <javac
     encoding="${build.encoding}"
     srcdir="${examples.dir}"
     includes="org/apache/hadoop/**/*.java"
     destdir="${build.examples}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath>
        <path refid="classpath"/>
        <pathelement location="${build.tools}"/>
      </classpath>
    </javac>
  </target>

<!-- ================================================================== -->
|
|
<!-- Make hadoop-mapred.jar -->
|
|
<!-- ================================================================== -->
|
|
<!-- -->
|
|
<!-- ================================================================== -->
|
|
<target name="jar" depends="compile-core" description="Make hadoop-mapered.jar">
|
|
<jar jarfile="${hadoop-mapred.jar}"
|
|
basedir="${build.classes}">
|
|
<manifest>
|
|
<section name="org/apache/hadoop">
|
|
<attribute name="Implementation-Title" value="${ant.project.name}"/>
|
|
<attribute name="Implementation-Version" value="${version}"/>
|
|
<attribute name="Implementation-Vendor" value="Apache"/>
|
|
</section>
|
|
</manifest>
|
|
<!-- <fileset file="${conf.dir}/commons-logging.properties"/>
|
|
<fileset file="${conf.dir}/log4j.properties"/>
|
|
<fileset file="${conf.dir}/hadoop-metrics.properties"/> -->
|
|
<zipfileset dir="${build.webapps}" prefix="webapps"/>
|
|
<fileset file="${jar.extra.properties.list}" />
|
|
</jar>
|
|
|
|
<jar jarfile="${hadoop-mapred-sources.jar}">
|
|
<fileset dir="${mapred.src.dir}" includes="org/apache/hadoop/**/*.java" />
|
|
<fileset dir="${build.src}" includes="org/apache/hadoop/**/*.java" />
|
|
</jar>
|
|
</target>
|
|
|
|
  <!-- ================================================================== -->
  <!-- Make the Hadoop examples jar.                                      -->
  <!-- ================================================================== -->
  <!--                                                                    -->
  <!-- ================================================================== -->
  <!-- Builds the examples jar (Main-Class: ExampleDriver) and a -sources
       jar of the example code. -->
  <target name="examples" depends="jar, compile-examples" description="Make the Hadoop examples jar.">
    <jar jarfile="${build.dir}/${examples.final.name}.jar"
         basedir="${build.examples}">
      <manifest>
        <attribute name="Main-Class"
                   value="org/apache/hadoop/examples/ExampleDriver"/>
      </manifest>
    </jar>

    <jar jarfile="${hadoop-mapred-examples-sources.jar}">
      <fileset dir="${examples.dir}" includes="org/apache/hadoop/**/*.java" />
    </jar>
  </target>

  <!-- Builds the tools jar and its -sources jar. -->
  <target name="tools" depends="jar, compile-tools"
          description="Make the Hadoop tools jar.">
    <jar jarfile="${build.dir}/${tools.final.name}.jar"
         basedir="${build.tools}">
    </jar>

    <jar jarfile="${hadoop-mapred-tools-sources.jar}">
      <fileset dir="${tools.src}" includes="org/apache/hadoop/**/*.java" />
    </jar>
  </target>

  <!-- ================================================================== -->
  <!-- Compile test code                                                  -->
  <!-- ================================================================== -->

  <!-- Compiles the mapred functional and unit tests, builds the
       testjob.jar / testshell.jar fixtures, and stages the data files the
       tests read (cache, concat and debug fixtures). -->
  <target name="compile-mapred-test" depends="compile-mapred-classes, compile-examples, ivy-retrieve-test">

    <mkdir dir="${test.mapred.build.classes}"/>
    <mkdir dir="${test.mapred.build.testjar}"/>
    <mkdir dir="${test.mapred.build.testshell}"/>

    <!-- Compile both the mapred and the unit test trees in one pass. -->
    <macro-compile-test
      source.dir="${test.src.dir}/mapred;${test.src.dir}/unit"
      dest.dir="${test.mapred.build.classes}"
      classpath="test.classpath"/>

    <!-- Build the testjob.jar fixture. -->
    <javac
     encoding="${build.encoding}"
     srcdir="${test.src.dir}/mapred/testjar"
     includes="*.java"
     destdir="${test.mapred.build.testjar}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath refid="test.classpath"/>
    </javac>

    <delete file="${test.mapred.build.testjar}/testjob.jar"/>
    <jar jarfile="${test.mapred.build.testjar}/testjob.jar"
         basedir="${test.mapred.build.testjar}">
    </jar>

    <!-- Build the testshell.jar fixture. -->
    <javac
     encoding="${build.encoding}"
     srcdir="${test.src.dir}/mapred/testshell"
     includes="*.java"
     destdir="${test.mapred.build.testshell}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}"/>
      <classpath refid="test.classpath"/>
    </javac>
    <delete file="${test.mapred.build.testshell}/testshell.jar"/>
    <jar jarfile="${test.mapred.build.testshell}/testshell.jar"
         basedir="${test.mapred.build.testshell}">
    </jar>

    <!-- Recreate the test data directories from scratch and stage fixtures. -->
    <delete dir="${test.cache.data}"/>
    <mkdir dir="${test.cache.data}"/>
    <delete dir="${test.concat.data}"/>
    <mkdir dir="${test.concat.data}"/>
    <delete dir="${test.debug.data}"/>
    <mkdir dir="${test.debug.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/cli/testMRConf.xml" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/cli/data60bytes" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/concat.bz2" todir="${test.concat.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/concat.gz" todir="${test.concat.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/testCompressThenConcat.txt.bz2" todir="${test.concat.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/testCompressThenConcat.txt.gz" todir="${test.concat.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/testConcatThenCompress.txt.bz2" todir="${test.concat.data}"/>
    <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/testConcatThenCompress.txt.gz" todir="${test.concat.data}"/>
  </target>

  <!-- Compiles a test source tree: all org/apache/hadoop sources under
       @{source.dir} into @{dest.dir} against the classpath reference
       named by @{classpath}. -->
  <macrodef name="macro-compile-test">
    <attribute name="source.dir"/>
    <attribute name="dest.dir"/>
    <attribute name="classpath"/>
    <sequential>
      <mkdir dir="@{dest.dir}"/>
      <javac
       encoding="${build.encoding}"
       srcdir="@{source.dir}"
       includes="org/apache/hadoop/**/*.java"
       destdir="@{dest.dir}"
       debug="${javac.debug}"
       optimize="${javac.optimize}"
       target="${javac.version}"
       source="${javac.version}"
       deprecation="${javac.deprecation}">
        <compilerarg line="${javac.args} ${javac.args.warnings}"/>
        <classpath refid="@{classpath}"/>
      </javac>
    </sequential>
  </macrodef>

  <!-- ================================================================== -->
  <!-- Make hadoop-mapred-test.jar                                        -->
  <!-- ================================================================== -->
  <!--                                                                    -->
  <!-- ================================================================== -->
  <!-- Stages the compiled test classes, jars them via -do-jar-test, and
       builds the test -sources jar. -->
  <target name="jar-test" depends="compile-mapred-test" description="Make hadoop-mapred-test.jar">
    <copy todir="${test.build.classes}">
      <fileset dir="${test.mapred.build.classes}"/>
    </copy>
    <subant buildpath="build.xml" target="-do-jar-test"/>
    <jar jarfile="${hadoop-mapred-test-sources.jar}">
      <fileset dir="${test.src.dir}/mapred" includes="org/apache/hadoop/**/*.java" />
      <fileset dir="${test.src.dir}/unit" includes="org/apache/hadoop/**/*.java" />
      <fileset dir="${test.src.dir}/mapred/testjar" includes="*.java" />
      <fileset dir="${test.src.dir}/mapred/testshell" includes="*.java" />
    </jar>
  </target>

  <!-- Internal helper (leading '-' makes it uncallable from the command
       line): jars ${test.build.classes} into the test jar with
       MapredTestDriver as Main-Class. -->
  <target name="-do-jar-test">
    <jar jarfile="${build.dir}/${test.final.name}.jar"
         basedir="${test.build.classes}">
      <manifest>
        <attribute name="Main-Class"
                   value="org/apache/hadoop/test/MapredTestDriver"/>
        <section name="org/apache/hadoop">
          <attribute name="Implementation-Title" value="${ant.project.name}"/>
          <attribute name="Implementation-Version" value="${version}"/>
          <attribute name="Implementation-Vendor" value="Apache"/>
        </section>
      </manifest>
    </jar>
  </target>

  <!-- ================================================================== -->
  <!-- Define exclude lists for different kinds of testing                -->
  <!-- ================================================================== -->
  <!-- No exclusions: used when every discovered test should run. -->
  <patternset id="empty.exclude.list.id" />

  <!-- Excludes every test listed in the commit-tests and smoke-tests files. -->
  <patternset id="commit.smoke.exclude.list.id">
    <excludesfile name="${test.mapred.commit.tests.file}"/>
    <excludesfile name="${test.mapred.smoke.tests.file}"/>
  </patternset>

  <!-- Shared JUnit driver used by all run-*-test targets.
       It recreates fresh data/log dirs, stages extra conf files, then runs
       JUnit in a forked JVM.  Which fileset(s) of tests run is selected by
       which of the testcase / tests.*.fi properties are set (each batchtest
       below has an if/unless guard).  After the run it invokes the
       checkfailure target, which records/raises failures.
       Attributes:
         test.file                - file listing the tests to include
         suite.type               - subdir of fileset.dir to scan (mapred|unit)
         classpath                - refid of the classpath to run with
         test.dir                 - working dir for data/logs/reports
         fileset.dir              - root of the test sources
         hadoop.conf.dir.deployed - optional deployed conf dir (system tests)
         exclude.list.id          - optional patternset of tests to skip -->
  <macrodef name="macro-test-runner">
    <attribute name="test.file" />
    <attribute name="suite.type" />
    <attribute name="classpath" />
    <attribute name="test.dir" />
    <attribute name="fileset.dir" />
    <attribute name="hadoop.conf.dir.deployed" default="" />
    <attribute name="exclude.list.id" default="empty.exclude.list.id" />
    <sequential>
      <!-- Start every run from clean data and log directories. -->
      <delete dir="@{test.dir}/data"/>
      <mkdir dir="@{test.dir}/data"/>
      <delete dir="@{test.dir}/logs"/>
      <mkdir dir="@{test.dir}/logs"/>
      <!-- Extra configuration files made visible to tests via
           the test.build.extraconf sysproperty below. -->
      <copy file="${test.src.dir}/hadoop-policy.xml"
            todir="@{test.dir}/extraconf" />
      <copy file="${test.src.dir}/fi-site.xml"
            todir="@{test.dir}/extraconf" />
      <junit showoutput="${test.output}"
             printsummary="${test.junit.printsummary}"
             haltonfailure="${test.junit.haltonfailure}"
             fork="yes"
             forkmode="${test.junit.fork.mode}"
             maxmemory="${test.junit.maxmemory}"
             dir="${basedir}" timeout="${test.timeout}"
             errorProperty="tests.failed" failureProperty="tests.failed">
        <jvmarg value="${test.junit.jvmargs}" />
        <sysproperty key="java.net.preferIPv4Stack" value="true"/>
        <sysproperty key="test.build.data" value="@{test.dir}/data"/>
        <sysproperty key="java.security.krb5.conf"
                     value="${test.src.dir}/krb5.conf"/>
        <sysproperty key="test.tools.input.dir" value="${test.tools.input.dir}"/>
        <sysproperty key="test.cache.data" value="${test.cache.data}"/>
        <sysproperty key="test.concat.data" value="${test.concat.data}"/>
        <sysproperty key="test.debug.data" value="${test.debug.data}"/>
        <sysproperty key="hadoop.log.dir" value="@{test.dir}/logs"/>
        <sysproperty key="test.src.dir" value="@{fileset.dir}"/>
        <sysproperty key="taskcontroller-path" value="${taskcontroller-path}"/>
        <sysproperty key="taskcontroller-ugi" value="${taskcontroller-ugi}"/>
        <sysproperty key="test.build.extraconf" value="@{test.dir}/extraconf" />
        <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
        <sysproperty key="java.library.path"
                     value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
        <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
        <!-- Propagate hadoop.tmp.dir to the child JVM only if it is set. -->
        <syspropertyset dynamic="no">
          <propertyref name="hadoop.tmp.dir"/>
        </syspropertyset>
        <!-- set compile.c++ in the child jvm only if it is set -->
        <syspropertyset dynamic="no">
          <propertyref name="compile.c++"/>
        </syspropertyset>

        <!-- Pass probability specifications to the spawn JVM -->
        <syspropertyset id="FaultProbabilityProperties">
          <propertyref regex="fi.*"/>
        </syspropertyset>
        <sysproperty key="test.system.hdrc.deployed.hadoopconfdir"
                     value="@{hadoop.conf.dir.deployed}" />
        <classpath refid="@{classpath}"/>
        <formatter type="${test.junit.output.format}" />
        <!-- Full suite run: everything in test.file except excluded tests,
             used when no single testcase was requested. -->
        <batchtest todir="@{test.dir}" unless="testcase">
          <fileset dir="@{fileset.dir}/@{suite.type}"
                   excludes="**/${test.exclude}.java aop/** system/**">
            <patternset>
              <includesfile name="@{test.file}"/>
              <excludesfile name="${test.exclude.file}" />
            </patternset>
            <patternset refid="@{exclude.list.id}"/>
          </fileset>
        </batchtest>
        <!-- Single-test run: -Dtestcase=Foo searches all three trees. -->
        <batchtest todir="@{test.dir}" if="testcase">
          <fileset dir="@{fileset.dir}/mapred" includes="**/${testcase}.java"/>
          <fileset dir="@{fileset.dir}/unit" includes="**/${testcase}.java"/>
          <fileset dir="@{fileset.dir}/system/test" includes="**/${testcase}.java"/>
        </batchtest>
        <!--batch test to test all the testcases in aop folder with fault
        injection-->
        <batchtest todir="@{test.dir}" if="tests.notestcase.fi">
          <fileset dir="@{fileset.dir}/aop"
                   includes="**/${test.include}.java"
                   excludes="**/${test.exclude}.java"
                   excludesfile="${test.exclude.file}" />
        </batchtest>
        <!-- batch test for testing a single test case in aop folder with
        fault injection-->
        <batchtest todir="@{test.dir}" if="tests.testcase.fi">
          <fileset dir="@{fileset.dir}/aop" includes="**/${testcase}.java"/>
        </batchtest>
        <!--The following batch is for very special occasions only when
        a non-FI tests are needed to be executed against FI-environment -->
        <batchtest todir="@{test.dir}" if="tests.testcaseonly.fi">
          <fileset dir="@{fileset.dir}/mapred"
                   includes="**/${testcase}.java"/>
        </batchtest>
      </junit>
      <!-- Record/raise failures (see the checkfailure target). -->
      <antcall target="checkfailure"/>
    </sequential>
  </macrodef>
|
|
|
|
  <!-- Aggregate target: the dependency list runs the whole suite in order
       (commit, smoke, the remainder, then the testcase-only variant);
       the target body itself is intentionally empty. -->
  <target name="run-test-mapred" depends="run-commit-test, run-smoke-test,
    run-test-mapred-excluding-commit-and-smoke, run-test-mapred-all-withtestcaseonly"
    description="Run mapred functional and system tests">
  </target>
|
|
|
|
  <!-- Runs only when -Dtestcase=... is given: delegates to the shared
       runner, which then executes just that single test. -->
  <target name="run-test-mapred-all-withtestcaseonly"
          depends="compile-mapred-test" if="testcase">
    <macro-test-runner
      test.file="${test.mapred.all.tests.file}"
      suite.type="mapred"
      classpath="${test.classpath.id}"
      test.dir="${test.build.dir}"
      fileset.dir="${test.src.dir}"/>
  </target>
|
|
|
|
  <!-- Runs the full mapred suite minus the tests the commit and smoke
       targets already covered (skipped when a single testcase is set). -->
  <target name="run-test-mapred-excluding-commit-and-smoke"
          depends="compile-mapred-test" unless="testcase">
    <macro-test-runner
      test.file="${test.mapred.all.tests.file}"
      suite.type="mapred"
      classpath="${test.classpath.id}"
      test.dir="${test.build.dir}"
      fileset.dir="${test.src.dir}"
      exclude.list.id="commit.smoke.exclude.list.id"
      />
  </target>
|
|
|
|
<target name="run-commit-test" depends="compile-mapred-test"
|
|
description="Run approximate 10-minute set of unit tests prior to commiting"
|
|
unless="testcase">
|
|
<macro-test-runner test.file="${test.mapred.commit.tests.file}" suite.type="mapred"
|
|
classpath="${test.classpath.id}"
|
|
test.dir="${test.build.dir}"
|
|
fileset.dir="${test.src.dir}"/>
|
|
</target>
|
|
|
|
  <!-- Medium suite: runs the tests listed in ${test.mapred.smoke.tests.file}
       (skipped when -Dtestcase is set). -->
  <target name="run-smoke-test" depends="compile-mapred-test"
          description="Run approximate 30-minute set of functional tests to guarantee viability of the MapReduce"
          unless="testcase">
    <macro-test-runner
      test.file="${test.mapred.smoke.tests.file}"
      suite.type="mapred"
      classpath="${test.classpath.id}"
      test.dir="${test.build.dir}"
      fileset.dir="${test.src.dir}"/>
  </target>
|
|
|
|
  <!-- Same runner as the mapred suites but over the src/test/unit tree
       (suite.type="unit"). -->
  <target name="run-test-unit" depends="compile-mapred-test" description="Run unit tests">
    <macro-test-runner test.file="${test.mapred.all.tests.file}"
                       suite.type="unit"
                       classpath="${test.classpath.id}"
                       test.dir="${test.build.dir}"
                       fileset.dir="${test.src.dir}"/>
  </target>
|
|
|
|
|
|
  <!-- Invoked (via antcall) after each JUnit run.  Only fires when the
       junit task set tests.failed.  Drops a marker file so test-core can
       fail at the end, and fails immediately unless continueOnFailure
       was set. -->
  <target name="checkfailure" if="tests.failed">
    <touch file="${test.build.dir}/testsfailed"/>
    <fail unless="continueOnFailure">Tests failed!</fail>
  </target>
|
|
|
|
  <!-- Delegates to the contrib modules' own "test" targets, passing the
       version and clover jar down so they build consistently. -->
  <target name="test-contrib" depends="compile,compile-tools,compile-mapred-test" description="Run contrib unit tests">
    <subant target="test">
      <property name="version" value="${version}"/>
      <property name="clover.jar" value="${clover.jar}"/>
      <fileset file="${contrib.dir}/build.xml"/>
    </subant>
  </target>
|
|
|
|
  <!-- Runs the mapred suites with continueOnFailure=true so every suite
       executes, then fails once at the end if the testsfailed marker
       (written by checkfailure) exists. -->
  <target name="test-core" description="Run core, hdfs and mapred unit tests">
    <delete file="${test.build.dir}/testsfailed"/>
    <property name="continueOnFailure" value="true"/>
    <antcall target="run-test-mapred"/>
    <available file="${test.build.dir}/testsfailed" property="testsfailed"/>
    <fail if="testsfailed">Tests failed!</fail>
  </target>
|
|
|
|
  <!-- Full test entry point: core tests via the depends list, then the
       contrib tests via a subant call back into this same build file
       (so test-contrib runs in a fresh project). -->
  <target name="test" depends="jar-test, test-core" description="Run all unit tests">
    <subant target="test-contrib">
      <fileset file="${basedir}/build.xml"/>
    </subant>
  </target>
|
|
|
|
  <!-- Run all unit tests, not just Test*, and use non-test configuration. -->
  <target name="test-cluster" description="Run all unit tests, not just Test*, and use non-test configuration.">
    <antcall target="test">
      <!-- Widen the include pattern and switch to the cluster classpath. -->
      <param name="test.include" value="*"/>
      <param name="test.classpath.id" value="test.cluster.classpath"/>
    </antcall>
  </target>
|
|
|
|
  <!-- Nightly build: all tests plus the release tarball; no extra steps. -->
  <target name="nightly" depends="test, tar">
  </target>
|
|
|
|
  <!-- ================================================================== -->
  <!-- Run optional third-party tool targets                              -->
  <!-- ================================================================== -->
  <!-- Runs Checkstyle over the mapred sources, writing an XML report and
       a sorted HTML rendering of it.  Skipped entirely unless
       check-for-checkstyle found the tool on the classpath. -->
  <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle" if="checkstyle.present" description="Run optional third-party tool targets">
    <taskdef resource="checkstyletask.properties">
      <classpath refid="checkstyle-classpath"/>
    </taskdef>

    <mkdir dir="${test.build.dir}"/>

    <!-- failOnViolation=false: report only, never break the build. -->
    <checkstyle config="${test.src.dir}/checkstyle.xml"
                failOnViolation="false">
      <fileset dir="${mapred.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
      <formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/>
    </checkstyle>

    <xslt style="${test.src.dir}/checkstyle-noframes-sorted.xsl"
          in="${test.build.dir}/checkstyle-errors.xml"
          out="${test.build.dir}/checkstyle-errors.html"/>
  </target>
|
|
|
|
  <!-- Sets checkstyle.present when the Checkstyle task definitions are on
       the retrieved classpath; gates the checkstyle target above. -->
  <target name="check-for-checkstyle">
    <available property="checkstyle.present" resource="checkstyletask.properties">
      <classpath refid="checkstyle-classpath"/>
    </available>
  </target>
|
|
|
|
  <!-- Builds every jar findbugs needs to analyze: core/tools/examples/test
       via depends, plus the four contrib jars via subant. -->
  <target name="all-jars" depends="tools,examples,jar-test">
    <subant target="jar">
      <property name="version" value="${version}"/>
      <property name="dist.dir" value="${dist.dir}"/>
      <fileset file="${contrib.dir}/capacity-scheduler/build.xml"/>
      <fileset file="${contrib.dir}/streaming/build.xml"/>
      <fileset file="${contrib.dir}/gridmix/build.xml"/>
      <fileset file="${contrib.dir}/mumak/build.xml"/>
    </subant>
  </target>
|
|
|
|
  <!-- Empty default so check-for-findbugs has something to test against;
       override with -Dfindbugs.home=... to enable the findbugs target. -->
  <property name="findbugs.home" value=""/>
  <!-- Runs FindBugs over the core, examples, tools and selected contrib
       jars (built by all-jars) and renders the XML report to HTML.
       Skipped unless check-for-findbugs located the installation. -->
  <target name="findbugs" depends="check-for-findbugs, all-jars" if="findbugs.present" description="Run findbugs if present">
    <property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/>
    <property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/>
    <property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.html"/>
    <property name="findbugs.report.xmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.xml"/>
    <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
             classpath="${findbugs.home}/lib/findbugs-ant.jar" />

    <mkdir dir="${findbugs.out.dir}"/>

    <findbugs home="${findbugs.home}" output="xml:withMessages"
              outputFile="${findbugs.report.xmlfile}" effort="max"
              excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx${findbugs.heap.size}">
      <!-- Dependencies needed to resolve references in analyzed classes. -->
      <auxClasspath>
        <fileset dir="${lib.dir}">
          <include name="**/*.jar"/>
        </fileset>
        <fileset dir="${build.ivy.lib.dir}/${ant.project.name}/common">
          <include name="**/*.jar"/>
        </fileset>
      </auxClasspath>
      <!-- Source paths let the report show the offending source lines. -->
      <sourcePath path="${mapred.src.dir}"/>
      <sourcePath path="${examples.dir}" />
      <sourcePath path="${tools.src}" />
      <sourcePath path="${basedir}/src/contrib/capacity-scheduler/src/java" />
      <sourcePath path="${basedir}/src/contrib/streaming/src/java" />
      <sourcePath path="${basedir}/src/contrib/gridmix/src/java" />
      <sourcePath path="${basedir}/src/contrib/mumak/src/java" />
      <!-- The jars to analyze. -->
      <class location="${basedir}/build/${final.name}.jar" />
      <class location="${basedir}/build/${examples.final.name}.jar" />
      <class location="${basedir}/build/${tools.final.name}.jar" />
      <class location="${basedir}/build/contrib/capacity-scheduler/hadoop-${version}-capacity-scheduler.jar" />
      <class location="${basedir}/build/contrib/streaming/hadoop-${version}-streaming.jar" />
      <class location="${basedir}/build/contrib/gridmix/hadoop-${version}-gridmix.jar" />
      <class location="${basedir}/build/contrib/mumak/hadoop-${version}-mumak.jar" />
    </findbugs>

    <xslt style="${findbugs.home}/src/xsl/default.xsl"
          in="${findbugs.report.xmlfile}"
          out="${findbugs.report.htmlfile}"/>
  </target>
|
|
|
|
  <!-- Sets findbugs.present when a FindBugs installation exists under
       ${findbugs.home}; gates the findbugs target above. -->
  <target name="check-for-findbugs">
    <available property="findbugs.present"
               file="${findbugs.home}/lib/findbugs.jar" />
  </target>
|
|
|
|
|
|
<!-- ================================================================== -->
|
|
<!-- Documentation -->
|
|
<!-- ================================================================== -->
|
|
|
|
<target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line." if="forrest.home">
|
|
<!-- The template file may not exist if building from a tarball -->
|
|
<copy file="${conf.dir}/mapred-queues.xml.template"
|
|
tofile="${build.docs}/mapred-queues.xml" failonerror="false"/>
|
|
<exec dir="${docs.src}" executable="${forrest.home}/bin/forrest"
|
|
failonerror="true">
|
|
</exec>
|
|
<copy todir="${build.docs}">
|
|
<fileset dir="${docs.src}/build/site/" />
|
|
</copy>
|
|
<copy file="${docs.src}/releasenotes.html" todir="${build.docs}"/>
|
|
<style basedir="${mapred.src.dir}" destdir="${build.docs}"
|
|
includes="mapred-default.xml" style="conf/configuration.xsl"/>
|
|
<antcall target="changes-to-html"/>
|
|
<subant target="docs">
|
|
<property name="build.docs" value="${build.docs}"/>
|
|
<fileset file="${contrib.dir}/build.xml"/>
|
|
</subant>
|
|
</target>
|
|
|
|
  <!-- Developer javadoc: unlike the public "javadoc" target it includes
       the contrib modules that share core MapReduce packages (e.g. mumak)
       and does not apply the exclude-private doclet. -->
  <target name="javadoc-dev" depends="compile, ivy-retrieve-javadoc" description="Generate javadoc for hadoop developers">
    <mkdir dir="${build.javadoc.dev}"/>
    <javadoc
      overview="${mapred.src.dir}/overview.html"
      packagenames="org.apache.hadoop.*"
      destdir="${build.javadoc.dev}"
      author="true"
      version="true"
      use="true"
      windowtitle="${Name} ${version} API"
      doctitle="${Name} ${version} Developer API"
      bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
      maxmemory="${javadoc.maxmemory}"
      >
      <packageset dir="${mapred.src.dir}"/>
      <packageset dir="${examples.dir}"/>
      <packageset dir="${tools.src}"/>

      <!-- Contrib modules included in the developer docs. -->
      <packageset dir="src/contrib/data_join/src/java"/>
      <packageset dir="src/contrib/gridmix/src/java"/>
      <packageset dir="src/contrib/index/src/java"/>
      <packageset dir="src/contrib/mumak/src/java"/>
      <packageset dir="src/contrib/streaming/src/java"/>
      <packageset dir="src/contrib/vaidya/src/java"/>
      <packageset dir="src/contrib/vertica/src/java"/>

      <link href="${javadoc.link.java}"/>

      <classpath >
        <path refid="classpath" />
        <fileset dir="src/contrib/">
          <include name="*/lib/*.jar" />
        </fileset>
        <path refid="javadoc-classpath"/>
        <pathelement path="${java.class.path}"/>
        <pathelement path="${lib.dir}/hadoop-common-test-${hadoop-common.version}.jar"/>
        <pathelement location="${build.tools}"/>
      </classpath>

      <!-- Package groupings shown in the generated overview page. -->
      <group title="Packages" packages="org.apache.*"/>
      <group title="Libraries" packages="org.apache.hadoop.mapred.lib*:org.apache.hadoop.mapreduce.lib*"/>
      <group title="Tools" packages="org.apache.hadoop.tools"/>
      <group title="Examples" packages="org.apache.hadoop.examples*"/>

      <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/>
      <group title="contrib: Gridmix" packages="org.apache.hadoop.mapred.gridmix*"/>
      <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/>
      <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*:org.apache.hadoop.typedbytes*"/>
      <group title="contrib: Vaidya" packages="org.apache.hadoop.vaidya*"/>
      <group title="contrib: Vertica" packages="org.apache.hadoop.vertica*"/>
    </javadoc>
  </target>
|
|
|
|
  <!-- Sets javadoc.is.uptodate when no .java/.html source is newer than
       the javadoc timestamp file, letting the javadoc target skip work. -->
  <target name="javadoc-uptodate" depends="compile, ivy-retrieve-common, ivy-retrieve-javadoc">
    <uptodate property="javadoc.is.uptodate">
      <srcfiles dir="${src.dir}">
        <include name="**/*.java" />
        <include name="**/*.html" />
      </srcfiles>
      <!-- merge mapper: compare every source file against the one stamp. -->
      <mapper type="merge" to="${build.javadoc.timestamp}" />
    </uptodate>
  </target>
|
|
|
|
  <!-- Public javadoc.  Uses the ExcludePrivateAnnotations doclet so only
       non-private-annotated API appears; skipped when javadoc-uptodate
       determined nothing changed. -->
  <target name="javadoc" description="Generate javadoc" depends="javadoc-uptodate"
          unless="javadoc.is.uptodate">
    <mkdir dir="${build.javadoc}"/>
    <javadoc
      overview="${mapred.src.dir}/overview.html"
      packagenames="org.apache.hadoop.*"
      destdir="${build.javadoc}"
      author="true"
      version="true"
      use="true"
      windowtitle="${Name} ${version} API"
      doctitle="${Name} ${version} API"
      bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
      maxmemory="${javadoc.maxmemory}"
      >
      <packageset dir="${mapred.src.dir}"/>
      <packageset dir="${examples.dir}"/>

      <!-- Don't include contrib modules that use the same packages as core
           MapReduce. This includes capacity-scheduler, dynamic-scheduler,
           fairscheduler, mumak. See also the javadoc-dev target. -->
      <packageset dir="src/contrib/data_join/src/java"/>
      <packageset dir="src/contrib/gridmix/src/java"/>
      <packageset dir="src/contrib/index/src/java"/>
      <packageset dir="src/contrib/streaming/src/java"/>
      <packageset dir="src/contrib/vaidya/src/java"/>
      <packageset dir="src/contrib/vertica/src/java"/>
      <packageset dir="${tools.src}"/>

      <link href="${javadoc.link.java}"/>

      <classpath >
        <path refid="classpath" />
        <fileset dir="src/contrib/">
          <include name="*/lib/*.jar" />
        </fileset>
        <path refid="javadoc-classpath"/>
        <pathelement path="${java.class.path}"/>
        <pathelement path="${lib.dir}/hadoop-common-test-${hadoop-common.version}.jar"/>
        <pathelement location="${build.tools}"/>
      </classpath>

      <!-- Package groupings shown in the generated overview page. -->
      <group title="Packages" packages="org.apache.*"/>
      <group title="Libraries" packages="org.apache.hadoop.mapred.lib*:org.apache.hadoop.mapreduce.lib*"/>
      <group title="Tools" packages="org.apache.hadoop.fs*:org.apache.hadoop.tools*:org.apache.hadoop.mapred.tool*:org.apache.hadoop.mapreduce.tool*"/>
      <group title="Examples" packages="org.apache.hadoop.examples*"/>

      <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/>
      <group title="contrib: Gridmix" packages="org.apache.hadoop.mapred.gridmix*"/>
      <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/>
      <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*:org.apache.hadoop.typedbytes*"/>
      <group title="contrib: Vaidya" packages="org.apache.hadoop.vaidya*"/>
      <group title="contrib: Vertica" packages="org.apache.hadoop.vertica*"/>

      <!-- Filters out API annotated @InterfaceAudience.Private etc. -->
      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsStandardDoclet"
              path="${common.ivy.lib.dir}/hadoop-common-${hadoop-common.version}.jar"/>
    </javadoc>
  </target>
|
|
|
|
  <!-- Emits a JDiff XML description of the current public API
       (hadoop-mapred ${version}) into ${jdiff.xml.dir}; consumed later by
       api-report to diff against the stable release. -->
  <target name="api-xml" depends="ivy-retrieve-jdiff,javadoc,write-null">
    <javadoc maxmemory="${javadoc.maxmemory}">
      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
              path="${common.ivy.lib.dir}/hadoop-common-${hadoop-common.version}.jar:${jdiff.jar}:${xerces.jar}">
        <param name="-apidir" value="${jdiff.xml.dir}"/>
        <param name="-apiname" value="hadoop-mapred ${version}"/>
        <param name="${jdiff.stability}"/>
      </doclet>
      <packageset dir="src/java"/>
      <packageset dir="src/tools"/>
      <packageset dir="${tools.src}"/>
      <classpath >
        <path refid="classpath" />
        <path refid="jdiff-classpath" />
        <pathelement path="${java.class.path}"/>
      </classpath>
    </javadoc>
  </target>
|
|
|
|
<target name="write-null">
|
|
<exec executable="touch">
|
|
<arg value="${jdiff.home}/Null.java"/>
|
|
</exec>
|
|
</target>
|
|
|
|
  <!-- Generates the JDiff compatibility report comparing the stable API
       description (${jdiff.stable}) against the one api-xml just wrote
       for ${version}.  Null.java is only a placeholder source so javadoc
       has something to process. -->
  <target name="api-report" depends="ivy-retrieve-jdiff,api-xml">
    <mkdir dir="${jdiff.build.dir}"/>
    <javadoc destdir="${jdiff.build.dir}"
             sourceFiles="${jdiff.home}/Null.java"
             maxmemory="${javadoc.maxmemory}">
      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
              path="${common.ivy.lib.dir}/hadoop-common-${hadoop-common.version}.jar:${jdiff.jar}:${xerces.jar}">
        <param name="-oldapi" value="hadoop-mapred ${jdiff.stable}"/>
        <param name="-newapi" value="hadoop-mapred ${version}"/>
        <param name="-oldapidir" value="${jdiff.xml.dir}"/>
        <param name="-newapidir" value="${jdiff.xml.dir}"/>
        <param name="-javadocold" value="${jdiff.stable.javadoc}"/>
        <param name="-javadocnew" value="../../api/"/>
        <param name="-stats"/>
        <param name="${jdiff.stability}"/>
        <param name="${jdiff.compatibility}"/>
      </doclet>
      <packageset dir="src/java"/>
      <packageset dir="src/tools"/>
      <packageset dir="${tools.src}"/>
      <classpath >
        <path refid="classpath" />
        <path refid="jdiff-classpath"/>
        <pathelement path="${java.class.path}"/>
      </classpath>
    </javadoc>
  </target>
|
|
|
|
  <!-- Renders CHANGES.txt to HTML via the changes2html.pl script and
       copies the stylesheet(s) alongside the output. -->
  <target name="changes-to-html" description="Convert CHANGES.txt into an html file">
    <mkdir dir="${build.docs}"/>
    <!-- CHANGES.txt is piped to the perl script on stdin; HTML on stdout. -->
    <exec executable="perl" input="CHANGES.txt" output="${build.docs}/changes.html" failonerror="true">
      <arg value="${changes.src}/changes2html.pl"/>
    </exec>
    <copy todir="${build.docs}">
      <fileset dir="${changes.src}" includes="*.css"/>
    </copy>
  </target>
|
|
|
|
  <!-- ================================================================== -->
  <!-- D I S T R I B U T I O N                                            -->
  <!-- ================================================================== -->
  <!--                                                                    -->
  <!-- ================================================================== -->
  <!-- Assembles the full source+binary+docs distribution tree under
       ${dist.dir}; the tar target wraps this into the release tarball. -->
  <target name="package" depends="compile, jar, javadoc, docs, api-report, examples, tools, jar-test, package-librecordio"
          description="Build distribution">
    <mkdir dir="${dist.dir}"/>
    <mkdir dir="${dist.dir}/lib"/>
    <mkdir dir="${dist.dir}/contrib"/>
    <mkdir dir="${dist.dir}/bin"/>
    <mkdir dir="${dist.dir}/docs"/>
    <mkdir dir="${dist.dir}/docs/api"/>
    <mkdir dir="${dist.dir}/docs/jdiff"/>

    <!-- Ivy-resolved dependencies, flattened into lib/. -->
    <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
      <fileset dir="${mapred.ivy.lib.dir}"/>
    </copy>

    <!-- Checked-in libs (native libs are handled separately). -->
    <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
      <fileset dir="lib">
        <exclude name="**/native/**"/>
      </fileset>
    </copy>

    <subant target="compile-test">
      <!--Pass down the version in case its needed again and the target
      distribution directory so contribs know where to install to.-->
      <property name="version" value="${version}"/>
      <property name="dist.dir" value="${dist.dir}"/>
      <fileset file="${contrib.dir}/build.xml"/>
    </subant>
    <subant target="package">
      <!--Pass down the version in case its needed again and the target
      distribution directory so contribs know where to install to.-->
      <property name="version" value="${version}"/>
      <property name="dist.dir" value="${dist.dir}"/>
      <fileset file="${contrib.dir}/build.xml"/>
    </subant>

    <copy todir="${dist.dir}/webapps">
      <fileset dir="${build.webapps}"/>
    </copy>

    <!-- The built jars: core, test, examples and tools. -->
    <copy todir="${dist.dir}">
      <fileset file="${build.dir}/${final.name}*.jar"/>
      <fileset file="${build.dir}/${test.final.name}.jar"/>
      <fileset file="${build.dir}/${examples.final.name}.jar"/>
      <fileset file="${build.dir}/${tools.final.name}.jar"/>
    </copy>

    <copy todir="${dist.dir}/bin">
      <fileset dir="bin"/>
    </copy>

    <copy todir="${dist.dir}/conf">
      <fileset dir="${conf.dir}" excludes="**/*.template"/>
    </copy>

    <copy todir="${dist.dir}/docs">
      <fileset dir="${build.docs}"/>
    </copy>

    <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>

    <copy todir="${dist.dir}/ivy">
      <fileset dir="ivy"/>
    </copy>

    <copy todir="${dist.dir}">
      <fileset dir=".">
        <include name="*.txt" />
      </fileset>
    </copy>

    <copy todir="${dist.dir}/src" includeEmptyDirs="true">
      <fileset dir="src" excludes="**/*.template **/docs/build/**/*"/>
    </copy>

    <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
      <fileset dir="${build.dir}/c++"/>
    </copy>

    <copy todir="${dist.dir}/" file="build.xml"/>
    <copy todir="${dist.dir}/" file="build-utils.xml"/>

    <!-- Ant copy does not preserve permissions: re-mark the scripts and
         configure files executable. -->
    <chmod perm="ugo+x" type="file" parallel="false">
      <fileset file="${dist.dir}/src/examples/pipes/configure"/>
      <fileset file="${dist.dir}/src/c++/utils/configure"/>
      <fileset file="${dist.dir}/src/c++/pipes/configure"/>
      <fileset file="${dist.dir}/src/c++/task-controller/configure"/>
    </chmod>
    <chmod perm="ugo+x" type="file" parallel="false">
      <fileset dir="${dist.dir}/bin"/>
      <fileset dir="${dist.dir}/src/contrib/">
        <include name="*/bin/*" />
      </fileset>
    </chmod>
    <chmod perm="ugo+x" type="file">
      <fileset dir="${dist.dir}/src/c++/pipes/debug"/>
    </chmod>

  </target>
|
|
|
|
  <!-- ================================================================== -->
  <!-- Make release tarball                                               -->
  <!-- ================================================================== -->
  <!-- Tars up the dist tree built by "package".  Two tarfilesets: the
       second re-adds scripts/configure files with mode 755 that the
       first excluded (tar entries carry their own permissions). -->
  <target name="tar" depends="package" description="Make release tarball">
    <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
      <param.listofitems>
        <tarfileset dir="${build.dir}" mode="664">
          <exclude name="${final.name}/bin/*" />
          <exclude name="${final.name}/contrib/*/bin/*" />
          <exclude name="${final.name}/src/examples/pipes/configure"/>
          <exclude name="${final.name}/src/c++/utils/configure"/>
          <exclude name="${final.name}/src/c++/pipes/configure"/>
          <exclude name="${final.name}/src/c++/task-controller/configure"/>
          <include name="${final.name}/**" />
        </tarfileset>
        <tarfileset dir="${build.dir}" mode="755">
          <include name="${final.name}/bin/*" />
          <include name="${final.name}/contrib/*/bin/*" />
          <include name="${final.name}/src/examples/pipes/configure"/>
          <include name="${final.name}/src/c++/utils/configure"/>
          <include name="${final.name}/src/c++/pipes/configure"/>
          <include name="${final.name}/src/c++/task-controller/configure"/>
        </tarfileset>
      </param.listofitems>
    </macro_tar>
  </target>
|
|
|
|
<target name="bin-package" depends="compile, jar, examples, tools, jar-test, package-librecordio"
|
|
description="assembles artifacts for binary target">
|
|
<mkdir dir="${dist.dir}"/>
|
|
<mkdir dir="${dist.dir}/include"/>
|
|
<mkdir dir="${dist.dir}/lib"/>
|
|
<mkdir dir="${dist.dir}/${package.share.dir}/contrib"/>
|
|
<mkdir dir="${dist.dir}/${package.share.dir}/lib"/>
|
|
<mkdir dir="${dist.dir}/${package.share.dir}/templates"/>
|
|
<mkdir dir="${dist.dir}/bin"/>
|
|
<mkdir dir="${dist.dir}/sbin"/>
|
|
|
|
<!-- enable this if there is mapred specific dependencies
|
|
<copy todir="${dist.dir}/${package.share.dir}/lib" includeEmptyDirs="false" flatten="true">
|
|
<fileset dir="${mapred.ivy.lib.dir}"/>
|
|
</copy> -->
|
|
|
|
<copy todir="${dist.dir}/include" includeEmptyDirs="false">
|
|
<fileset dir="${build.dir}/c++/${build.platform}/include"
|
|
erroronmissingdir="false">
|
|
<include name="**"/>
|
|
</fileset>
|
|
</copy>
|
|
|
|
<copy todir="${dist.dir}/lib" includeEmptyDirs="false">
|
|
<fileset dir="${build.dir}/c++/${build.platform}/lib"
|
|
erroronmissingdir="false">
|
|
<include name="**"/>
|
|
</fileset>
|
|
</copy>
|
|
|
|
<subant target="package">
|
|
<!--Pass down the version in case its needed again and the target
|
|
distribution directory so contribs know where to install to.-->
|
|
<property name="version" value="${version}"/>
|
|
<property name="dist.dir" value="${dist.dir}/${package.share.dir}"/>
|
|
<fileset file="${contrib.dir}/build.xml"/>
|
|
</subant>
|
|
|
|
<copy todir="${dist.dir}/${package.share.dir}">
|
|
<fileset file="${build.dir}/${final.name}*.jar"/>
|
|
<fileset file="${build.dir}/${test.final.name}.jar"/>
|
|
<fileset file="${build.dir}/${examples.final.name}.jar"/>
|
|
<fileset file="${build.dir}/${tools.final.name}.jar"/>
|
|
</copy>
|
|
|
|
<copy todir="${dist.dir}/bin">
|
|
<fileset dir="bin">
|
|
<include name="mapred"/>
|
|
</fileset>
|
|
</copy>
|
|
|
|
<copy todir="${dist.dir}/libexec">
|
|
<fileset dir="bin">
|
|
<include name="mapred-config.sh"/>
|
|
</fileset>
|
|
</copy>
|
|
|
|
<copy todir="${dist.dir}/sbin">
|
|
<fileset dir="bin">
|
|
<include name="start-*.sh"/>
|
|
<include name="stop-*.sh"/>
|
|
</fileset>
|
|
</copy>
|
|
|
|
<copy file="${basedir}/src/packages/update-mapred-env.sh" tofile="${dist.dir}/sbin/update-mapred-env.sh"/>
|
|
<copy file="${basedir}/src/packages/rpm/init.d/hadoop-jobtracker" tofile="${dist.dir}/sbin/hadoop-jobtracker.redhat"/>
|
|
<copy file="${basedir}/src/packages/rpm/init.d/hadoop-tasktracker" tofile="${dist.dir}/sbin/hadoop-tasktracker.redhat"/>
|
|
<copy file="${basedir}/src/packages/deb/init.d/hadoop-jobtracker" tofile="${dist.dir}/sbin/hadoop-jobtracker.debian"/>
|
|
<copy file="${basedir}/src/packages/deb/init.d/hadoop-tasktracker" tofile="${dist.dir}/sbin/hadoop-tasktracker.debian"/>
|
|
|
|
<copy file="${basedir}/src/packages/update-mapred-env.sh" tofile="${dist.dir}/sbin/update-mapred-env.sh"/>
|
|
|
|
<copy todir="${dist.dir}/etc/hadoop">
|
|
<fileset dir="${conf.dir}" excludes="**/*.template"/>
|
|
</copy>
|
|
|
|
<copy todir="${dist.dir}/${package.share.dir}/templates">
|
|
<fileset dir="${basedir}/src/packages/templates/conf" includes="*"/>
|
|
</copy>
|
|
|
|
<copy todir="${dist.dir}/${package.share.dir}/webapps">
|
|
<fileset dir="${build.webapps}"/>
|
|
</copy>
|
|
|
|
<copy todir="${dist.dir}/share/doc/hadoop/${module}">
|
|
<fileset dir=".">
|
|
<include name="*.txt" />
|
|
</fileset>
|
|
</copy>
|
|
|
|
<chmod perm="ugo+x" type="file" parallel="false">
|
|
<fileset dir="${dist.dir}/bin"/>
|
|
<fileset dir="${dist.dir}/sbin"/>
|
|
</chmod>
|
|
</target>
|
|
|
|
  <!-- Builds the Herriot system-test deployment tarball: copies the
       bin-package output, swaps in the instrumented common/hdfs/mapred
       jars, and tars the result. -->
  <target name="binary-system" depends="bin-package, jar-system, jar-test-system"
          description="make system test package for deployment">
    <!-- TODO: replace this wholesale copy of the dist tree with a more
         targeted assembly. -->
    <copy todir="${system-test-build-dir}/${final.name}">
      <fileset dir="${dist.dir}">
      </fileset>
    </copy>
    <!-- System-test configuration overrides the shipped conf. -->
    <copy todir="${system-test-build-dir}/${final.name}/conf">
      <fileset dir="${test.src.dir}/system/conf/"/>
    </copy>
    <copy todir="${system-test-build-dir}">
      <fileset dir="${build.dir}">
        <include name="${test.final.name}.jar"/>
        <include name="${examples.final.name}.jar"/>
      </fileset>
    </copy>
    <!-- Replace the stock jars with their Herriot-instrumented builds,
         keeping the original (non-herriot) file names. -->
    <copy tofile="${system-test-build-dir}/${final.name}/lib/hadoop-common-${hadoop-common.version}.jar"
          file="${system-test-build-dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${hadoop-common.version}.jar"
          overwrite="true"/>
    <copy tofile="${system-test-build-dir}/${final.name}/lib/hadoop-hdfs-${version}.jar"
          file="${system-test-build-dir}/ivy/lib/${ant.project.name}/system/hadoop-hdfs-${herriot.suffix}-${version}.jar"
          overwrite="true"/>
    <copy tofile="${system-test-build-dir}/${final.name}/${final.name}.jar"
          file="${system-test-build-dir}/${instrumented.final.name}.jar" overwrite="true"/>
    <copy tofile="${system-test-build-dir}/${final.name}/${final.name}-sources.jar"
          file="${system-test-build-dir}/${instrumented.final.name}-sources.jar" overwrite="true"/>
    <macro_tar
      param.destfile="${system-test-build-dir}/${final.name}-bin.${herriot.suffix}.tar.gz">
      <param.listofitems>
        <tarfileset dir="${system-test-build-dir}" mode="664">
          <exclude name="${final.name}/bin/*" />
          <exclude name="${final.name}/src/**" />
          <exclude name="${final.name}/docs/**" />
          <include name="${final.name}/**" />
        </tarfileset>
      </param.listofitems>
    </macro_tar>
  </target>
|
|
|
|
  <!-- Binary-only tarball: the bin-package tree minus sources and docs;
       bin/* re-added with mode 755 (tar entries carry permissions). -->
  <target name="binary" depends="bin-package" description="Make tarball without source and documentation">
    <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
      <param.listofitems>
        <tarfileset dir="${build.dir}" mode="664">
          <exclude name="${final.name}/bin/*" />
          <exclude name="${final.name}/src/**" />
          <exclude name="${final.name}/docs/**" />
          <include name="${final.name}/**" />
        </tarfileset>
        <tarfileset dir="${build.dir}" mode="755">
          <include name="${final.name}/bin/*" />
        </tarfileset>
      </param.listofitems>
    </macro_tar>
  </target>
|
|
|
|
  <!-- Builds the RPM: stages the binary tarball as a SOURCE, expands
       @token@ placeholders into the spec file, runs rpmbuild via Ant's
       rpm task, then collects the built .rpm files. -->
  <target name="rpm" depends="binary" description="Make rpm package">
    <!-- Standard rpmbuild topdir layout. -->
    <mkdir dir="${package.buildroot}/BUILD" />
    <mkdir dir="${package.buildroot}/RPMS" />
    <mkdir dir="${package.buildroot}/SRPMS" />
    <mkdir dir="${package.buildroot}/SOURCES" />
    <mkdir dir="${package.buildroot}/SPECS" />
    <copy todir="${package.buildroot}/SOURCES">
      <fileset dir="${build.dir}">
        <include name="${final.name}-bin.tar.gz" />
      </fileset>
    </copy>
    <!-- Substitute @token@ placeholders while copying the spec.  Note the
         spec version uses ${_version} (no -SNAPSHOT suffix): rpm versions
         may not contain hyphens. -->
    <copy file="${src.dir}/packages/rpm/spec/hadoop-mapred.spec" todir="${package.buildroot}/SPECS">
      <filterchain>
        <replacetokens>
          <token key="final.name" value="${final.name}" />
          <token key="version" value="${_version}" />
          <token key="package.release" value="${package.release}" />
          <token key="package.build.dir" value="${package.build.dir}" />
          <token key="package.prefix" value="${package.prefix}" />
          <token key="package.conf.dir" value="${package.conf.dir}" />
          <token key="package.log.dir" value="${package.log.dir}" />
          <token key="package.pid.dir" value="${package.pid.dir}" />
          <token key="package.var.dir" value="${package.var.dir}" />
        </replacetokens>
      </filterchain>
    </copy>
    <rpm specFile="hadoop-mapred.spec" command="-bb --target ${os.arch}" topDir="${package.buildroot}" cleanBuildDir="true" failOnError="true"/>
    <copy todir="${build.dir}/" flatten="true">
      <fileset dir="${package.buildroot}/RPMS">
        <include name="**/*.rpm" />
      </fileset>
    </copy>
    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
  </target>
|
|
|
|
  <target name="deb" depends="ivy-retrieve-package, binary" description="Make deb package">
    <!-- jdeb builds the Debian package in pure Java; the task jar comes in
         via the "package" ivy configuration. -->
    <taskdef name="deb"
      classname="org.vafer.jdeb.ant.DebAntTask">
      <classpath refid="ivy-package.classpath" />
    </taskdef>

    <!-- Stage the binary tree plus the Debian control directory. -->
    <mkdir dir="${package.build.dir}/hadoop.control" />
    <mkdir dir="${package.buildroot}/${package.prefix}" />
    <copy todir="${package.buildroot}/${package.prefix}">
      <fileset dir="${build.dir}/${final.name}">
        <include name="**" />
      </fileset>
    </copy>
    <copy todir="${package.build.dir}/hadoop.control">
      <fileset dir="${src.dir}/packages/deb/hadoop.control">
        <exclude name="control" />
      </fileset>
    </copy>
    <!-- The control file alone is token-filtered; keep the token list in
         sync with the rpm spec filter above. -->
    <copy file="${src.dir}/packages/deb/hadoop.control/control" todir="${package.build.dir}/hadoop.control">
      <filterchain>
        <replacetokens>
          <token key="final.name" value="${final.name}" />
          <token key="version" value="${_version}" />
          <token key="package.release" value="${package.release}" />
          <token key="package.build.dir" value="${package.build.dir}" />
          <token key="package.prefix" value="${package.prefix}" />
          <token key="package.conf.dir" value="${package.conf.dir}" />
          <token key="package.log.dir" value="${package.log.dir}" />
          <token key="package.pid.dir" value="${package.pid.dir}" />
        </replacetokens>
      </filterchain>
    </copy>
    <deb destfile="${package.buildroot}/${name}_${_version}-${package.release}_${os.arch}.deb" control="${package.build.dir}/hadoop.control">
      <!-- Regular payload, read-only; scripts/config handled by later filesets. -->
      <tarfileset dir="${build.dir}/${final.name}" filemode="644" prefix="${package.prefix}">
        <exclude name="bin/*" />
        <exclude name="${package.share.dir}/contrib/*/bin/*" />
        <exclude name="etc" />
        <exclude name="etc/**" />
        <exclude name="libexec/*" />
        <exclude name="sbin/*" />
        <include name="**" />
      </tarfileset>
      <!-- Executables; distro-specific .redhat/.debian variants are dropped. -->
      <tarfileset dir="${build.dir}/${final.name}" filemode="755" prefix="${package.prefix}">
        <include name="bin/*" />
        <include name="sbin/*" />
        <exclude name="sbin/*.redhat" />
        <exclude name="sbin/*.debian" />
        <include name="libexec/*" />
        <include name="${package.share.dir}/contrib/*/bin/*" />
      </tarfileset>
      <tarfileset dir="${src.dir}/packages" filemode="755" prefix="${package.prefix}/sbin">
        <include name="*.sh" />
      </tarfileset>
      <!-- Default configuration; the excluded site/metrics files are left to
         the administrator and not shipped here. -->
      <tarfileset dir="${build.dir}/${final.name}/etc/hadoop" filemode="644" prefix="${package.conf.dir}">
        <exclude name="configuration.xsl" />
        <exclude name="hadoop-metrics2.properties" />
        <exclude name="core-site.xml" />
        <exclude name="hdfs-site.xml" />
        <exclude name="mapred-site.xml" />
        <include name="**" />
      </tarfileset>
      <tarfileset dir="${basedir}/src/packages/deb/init.d" filemode="755" prefix="/etc/init.d">
        <include name="**" />
      </tarfileset>
    </deb>
    <!-- Surface the .deb at the top of ${build.dir} and discard scratch space. -->
    <copy todir="${build.dir}/" flatten="true">
      <fileset dir="${package.buildroot}">
        <include name="**/${name}*.deb" />
      </fileset>
    </copy>
    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
  </target>
|
|
|
|
  <!-- Bootstrap: fetch the Maven Ant Tasks jar used by the mvn-* targets.
       usetimestamp avoids re-downloading an up-to-date copy. -->
  <target name="ant-task-download" description="To download mvn-ant-task">
    <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
  </target>
|
|
|
|
  <!-- Load the Maven Ant Tasks antlib under the urn:maven-artifact-ant
       namespace (the artifact: prefix declared on the project element). -->
  <target name="mvn-taskdef" depends="ant-task-download">
    <path id="mvn-ant-task.classpath" path="${ant_task.jar}"/>
    <typedef resource="org/apache/maven/artifact/ant/antlib.xml"
             uri="urn:maven-artifact-ant"
             classpathref="mvn-ant-task.classpath"/>
  </target>
|
|
|
|
  <!-- Evict cached Hadoop common/hdfs artifacts from the per-user ivy cache
       so the next resolve picks up fresh snapshot builds. -->
  <target name="clean-cache" description="Clean. Delete ivy cache">
    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-common"/>
    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-common-test"/>
    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs"/>
    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-test"/>
  </target>
|
|
|
|
  <target name="mvn-install" depends="mvn-taskdef,jar,jar-test,set-version,-mvn-system-install"
     description="To install hadoop mapreduce and test jars to local filesystem's m2 cache">
    <!-- Install the main and test jars (plus their -sources attachments)
         into the local ~/.m2 repository using the generated POMs. -->
    <artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/>
    <artifact:pom file="${hadoop-mapred-test.pom}" id="hadoop.mapred.test"/>
    <artifact:install file="${hadoop-mapred.jar}">
      <pom refid="hadoop.mapred"/>
      <attach file="${hadoop-mapred-sources.jar}" classifier="sources" />
    </artifact:install>
    <artifact:install file="${hadoop-mapred-test.jar}">
      <pom refid="hadoop.mapred.test"/>
      <attach file="${hadoop-mapred-test-sources.jar}" classifier="sources" />
    </artifact:install>
  </target>
|
|
|
|
<target name="mvn-deploy" depends="mvn-taskdef, jar, jar-test,
|
|
jar-system, jar-test-system, set-version, signanddeploy, simpledeploy"
|
|
description="To deploy hadoop mapredice and test jar's to apache
|
|
snapshot's repository"/>
|
|
|
|
  <!-- Staging path: deploy the four artifact sets (main, test, instrumented,
       instrumented-test) plus their GPG signatures to the ASF staging repo.
       Runs only when -Dstaging is set; "sign" has produced the .asc files. -->
  <target name="signanddeploy" if="staging" depends="sign">
    <artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/>
    <artifact:pom file="${hadoop-mapred-test.pom}" id="hadoop.mapred.test"/>
    <artifact:pom file="${hadoop-mapred-instrumented.pom}"
      id="hadoop.mapred.${herriot.suffix}"/>
    <artifact:pom file="${hadoop-mapred-instrumented-test.pom}"
      id="hadoop.mapred.${herriot.suffix}.test"/>
    <!-- wagon-http provides HTTPS transport for artifact:deploy. -->
    <artifact:install-provider artifactId="wagon-http"
      version="${wagon-http.version}"/>

    <artifact:deploy file="${hadoop-mapred.jar}">
      <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
      <pom refid="hadoop.mapred"/>
      <attach file="${hadoop-mapred.jar}.asc" type="jar.asc"/>
      <attach file="${hadoop-mapred.pom}.asc" type="pom.asc"/>
      <attach file="${hadoop-mapred-sources.jar}.asc" type="jar.asc"
        classifier="sources" />
      <attach file="${hadoop-mapred-sources.jar}" classifier="sources"/>
    </artifact:deploy>

    <artifact:deploy file="${hadoop-mapred-test.jar}">
      <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
      <pom refid="hadoop.mapred.test"/>
      <attach file="${hadoop-mapred-test.jar}.asc" type="jar.asc"/>
      <attach file="${hadoop-mapred-test.pom}.asc" type="pom.asc"/>
      <attach file="${hadoop-mapred-test-sources.jar}.asc" type="jar.asc"
        classifier="sources"/>
      <attach file="${hadoop-mapred-test-sources.jar}" classifier="sources"/>
    </artifact:deploy>

    <artifact:deploy file="${hadoop-mapred-instrumented.jar}">
      <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
      <pom refid="hadoop.mapred.${herriot.suffix}"/>
      <attach file="${hadoop-mapred-instrumented.jar}.asc" type="jar.asc"/>
      <attach file="${hadoop-mapred-instrumented.pom}.asc" type="pom.asc"/>
      <attach file="${hadoop-mapred-instrumented-sources.jar}.asc"
        type="jar.asc" classifier="sources"/>
      <attach file="${hadoop-mapred-instrumented-sources.jar}"
        classifier="sources"/>
    </artifact:deploy>

    <artifact:deploy file="${hadoop-mapred-instrumented-test.jar}">
      <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
      <pom refid="hadoop.mapred.${herriot.suffix}.test"/>
      <attach file="${hadoop-mapred-instrumented-test.jar}.asc" type="jar.asc"/>
      <attach file="${hadoop-mapred-instrumented-test.pom}.asc" type="pom.asc"/>
      <attach file="${hadoop-mapred-instrumented-test-sources.jar}.asc"
        type="jar.asc" classifier="sources"/>
      <attach file="${hadoop-mapred-instrumented-test-sources.jar}"
        classifier="sources"/>
    </artifact:deploy>
  </target>
|
|
|
|
  <!-- Prompt once for the GPG passphrase, define a sign-artifact macro, then
       detach-sign every release artifact (jars, source jars, POMs).
       Runs only when -Dstaging is set. -->
  <target name="sign" depends="clean-sign" if="staging">
    <input message="password:>" addproperty="gpg.passphrase">
      <handler classname="org.apache.tools.ant.input.SecureInputHandler" />
    </input>
    <macrodef name="sign-artifact" description="Signs the artifact">
      <attribute name="input.file"/>
      <attribute name="output.file" default="@{input.file}.asc"/>
      <attribute name="gpg.passphrase"/>
      <sequential>
        <echo>Signing @{input.file} Sig File: @{output.file}</echo>
        <!-- NOTE(review): passing the passphrase on the gpg command line makes
             it visible in the process table; gpg's passphrase-fd mechanism
             would be safer - confirm whether this matters on release hosts. -->
        <exec executable="gpg" >
          <arg value="--armor"/>
          <arg value="--output"/>
          <arg value="@{output.file}"/>
          <arg value="--passphrase"/>
          <arg value="@{gpg.passphrase}"/>
          <arg value="--detach-sig"/>
          <arg value="@{input.file}"/>
        </exec>
      </sequential>
    </macrodef>
    <sign-artifact input.file="${hadoop-mapred.jar}"
     output.file="${hadoop-mapred.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-test.jar}"
     output.file="${hadoop-mapred-test.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-sources.jar}"
     output.file="${hadoop-mapred-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-test-sources.jar}"
     output.file="${hadoop-mapred-test-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred.pom}"
     output.file="${hadoop-mapred.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-test.pom}"
     output.file="${hadoop-mapred-test.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-instrumented.jar}"
     output.file="${hadoop-mapred-instrumented.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-instrumented.pom}"
     output.file="${hadoop-mapred-instrumented.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-instrumented-sources.jar}"
     output.file="${hadoop-mapred-instrumented-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-instrumented-test.jar}"
     output.file="${hadoop-mapred-instrumented-test.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-instrumented-test.pom}"
     output.file="${hadoop-mapred-instrumented-test.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
    <sign-artifact input.file="${hadoop-mapred-instrumented-test-sources.jar}"
     output.file="${hadoop-mapred-instrumented-test-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
  </target>
|
|
|
|
  <!-- Snapshot path: deploy unsigned artifacts to the ASF snapshot repo.
       Runs only when -Dstaging is NOT set (mirror of signanddeploy).
       NOTE(review): unlike signanddeploy, the instrumented *test* artifact is
       not deployed here - confirm that omission is intentional. -->
  <target name="simpledeploy" unless="staging">
    <artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/>
    <artifact:pom file="${hadoop-mapred-test.pom}" id="hadoop.mapred.test"/>
    <artifact:pom file="${hadoop-mapred-instrumented.pom}"
      id="hadoop.mapred.${herriot.suffix}"/>

    <artifact:install-provider artifactId="wagon-http" version="${wagon-http.version}"/>
    <artifact:deploy file="${hadoop-mapred.jar}">
      <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
      <pom refid="hadoop.mapred"/>
      <attach file="${hadoop-mapred-sources.jar}" classifier="sources" />
    </artifact:deploy>

    <artifact:deploy file="${hadoop-mapred-test.jar}">
      <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
      <pom refid="hadoop.mapred.test"/>
      <attach file="${hadoop-mapred-test-sources.jar}" classifier="sources" />
    </artifact:deploy>

    <artifact:deploy file="${hadoop-mapred-instrumented.jar}">
      <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
      <pom refid="hadoop.mapred.${herriot.suffix}"/>
      <attach file="${hadoop-mapred-instrumented-sources.jar}" classifier="sources" />
    </artifact:deploy>
  </target>
|
|
|
|
  <!-- Regenerate the four ivy descriptor files from their templates and stamp
       the current ${version} into them (replacing the @version token). -->
  <target name="set-version">
    <delete file="${basedir}/ivy/hadoop-mapred.xml"/>
    <delete file="${basedir}/ivy/hadoop-mapred-test.xml"/>
    <delete file="${basedir}/ivy/hadoop-mapred-${herriot.suffix}.xml"/>
    <delete file="${basedir}/ivy/hadoop-mapred-${herriot.suffix}-test.xml"/>
    <copy file="${basedir}/ivy/hadoop-mapred-template.xml" tofile="${basedir}/ivy/hadoop-mapred.xml"/>
    <copy file="${basedir}/ivy/hadoop-mapred-test-template.xml" tofile="${basedir}/ivy/hadoop-mapred-test.xml"/>
    <copy file="${basedir}/ivy/hadoop-mapred-${herriot.suffix}-template.xml"
      tofile="${basedir}/ivy/hadoop-mapred-${herriot.suffix}.xml"/>
    <copy file="${basedir}/ivy/hadoop-mapred-${herriot.suffix}-test-template.xml"
      tofile="${basedir}/ivy/hadoop-mapred-${herriot.suffix}-test.xml"/>
    <!-- @version appears once per line in the templates; byline keeps the
         regexp replacement line-oriented. -->
    <replaceregexp byline="true">
      <regexp pattern="@version"/>
      <substitution expression="${version}"/>
      <fileset dir="${basedir}/ivy">
        <include name="hadoop-mapred.xml"/>
        <include name="hadoop-mapred-test.xml"/>
        <include name="hadoop-mapred-${herriot.suffix}.xml"/>
        <include name="hadoop-mapred-${herriot.suffix}-test.xml"/>
      </fileset>
    </replaceregexp>
  </target>
|
|
|
|
<!-- ================================================================== -->
|
|
<!-- Perform audit activities for the release -->
|
|
<!-- ================================================================== -->
|
|
  <!-- Load the Apache RAT (release audit) antlib from the releaseaudit
       ivy configuration. -->
  <target name="rats-taskdef" depends="ivy-retrieve-releaseaudit">
    <typedef format="xml" resource="org/apache/rat/anttasks/antlib.xml" uri="antlib:org.apache.rat.anttasks"
      classpathref="releaseaudit-classpath"/>
  </target>
|
|
|
|
<!--<target name="releaseaudit" depends="package, rats-taskdef" description="Release Audit activities"> -->
|
|
  <target name="releaseaudit" depends="package, rats-taskdef" description="Release Audit activities">
    <!-- Run Apache RAT over the packaged distribution to flag files missing
         license headers. The exclude list covers generated files, test
         fixtures and binary data that legitimately carry no header. -->
    <rat:report xmlns:rat="antlib:org.apache.rat.anttasks">
      <fileset dir="${dist.dir}">
        <exclude name="CHANGES.txt"/>
        <exclude name="**/conf/*"/>
        <exclude name="**/docs/"/>
        <exclude name="**/VERSION"/>
        <exclude name="webapps/**/WEB-INF/web.xml"/>
        <exclude name="lib/jdiff/"/>
        <exclude name="src/test/all-tests"/>
        <exclude name="src/test/commit-tests"/>
        <exclude name="src/test/empty-file"/>
        <exclude name="src/test/mapred/org/apache/hadoop/mapred/test.tgz"/>
        <exclude name="src/test/tools/data/rumen/**/*"/>
        <exclude name="src/test/mapred/org/apache/hadoop/mapred/*.txt"/>
        <exclude name="src/contrib/mumak/src/test/data/*.json"/>
        <exclude name="src/contrib/index/sample/*.txt"/>
        <exclude name="src/test/mapred/org/apache/hadoop/cli/data60bytes"/>
        <exclude name="src/examples/org/apache/hadoop/examples/dancing/puzzle1.dta"/>
        <exclude name="src/contrib/eclipse-plugin/META-INF/MANIFEST.MF"/>
        <exclude name="src/c++/librecordio/*"/>
        <exclude name="src/c++/pipes/*"/>
        <exclude name="src/c++/utils/*"/>
        <exclude name="src/c++/task-controller/*"/>
        <exclude name="src/examples/pipes/*"/>
        <exclude name="src/c++/pipes/debug/*"/>
      </fileset>
    </rat:report>
  </target>
|
|
|
|
<!-- ================================================================== -->
|
|
<!-- Clean. Delete the build files, and their directories -->
|
|
<!-- ================================================================== -->
|
|
  <target name="clean" depends="clean-contrib, clean-fi, clean-sign" description="Clean. Delete the build files, and their directories">
    <!-- Remove the build tree, generated docs, and all generated POMs. -->
    <delete dir="${build.dir}"/>
    <delete dir="${docs.src}/build"/>
    <delete file="${hadoop-mapred.pom}"/>
    <delete file="${hadoop-mapred-test.pom}"/>
    <delete file="${hadoop-mapred-instrumented.pom}"/>
    <delete file="${hadoop-mapred-instrumented-test.pom}"/>
    <delete file="${hadoop-mapred-examples.pom}"/>
    <delete file="${hadoop-mapred-tools.pom}"/>
  </target>
|
|
|
|
  <!-- Remove every detached GPG signature (*.asc) under the project root,
       so "sign" always regenerates them from scratch. -->
  <target name="clean-sign" description="Clean. Delete .asc files">
    <delete>
      <fileset dir="." includes="**/**/*.asc"/>
    </delete>
  </target>
|
|
|
|
  <!-- Beyond "clean": also drop the bootstrap jars (ivy, maven-ant-tasks)
       so they are re-downloaded on the next build. -->
  <target name="veryclean" depends="clean-cache,clean"
          description="veryclean. Delete ivy and ant maven task jar">
    <delete file="${ant_task.jar}"/>
    <delete file="${ivy.jar}"/>
  </target>
|
|
|
|
<!-- ================================================================== -->
|
|
<!-- Clean contrib target. For now, must be called explicitly -->
|
|
<!-- Using subant instead of ant as a workaround for 30569 -->
|
|
<!-- ================================================================== -->
|
|
  <!-- Delegate cleaning to the contrib build file. subant (not ant) is used
       as a workaround for Ant bug 30569, per the banner comment above. -->
  <target name="clean-contrib">
    <subant target="clean">
      <fileset file="src/contrib/build.xml"/>
    </subant>
  </target>
|
|
|
|
|
|
<!-- ================================================================== -->
|
|
<!-- librecordio targets. -->
|
|
<!-- ================================================================== -->
|
|
|
|
  <!-- Build the native librecordio library via its own Makefile. Only runs
       when -Dlibrecordio is set; requires Xerces-C via ${xercescroot}. -->
  <target name="compile-librecordio" depends="init" if="librecordio" >
    <mkdir dir="${build.librecordio}"/>
    <exec dir="${librecordio.src}" executable="${make.cmd}" failonerror="true">
      <env key="XERCESCROOT" value="${xercescroot}"/>
      <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
    </exec>
  </target>
|
|
|
|
  <!-- Run the librecordio test Makefile ("all" target) against a fresh
       test directory. Only runs when -Dlibrecordio is set. -->
  <target name="test-librecordio" depends="compile-librecordio, compile-core" if="librecordio">
    <delete dir="${librecordio.test.dir}"/>
    <mkdir dir="${librecordio.test.dir}"/>
    <exec dir="${librecordio.src}/test" executable="${make.cmd}" failonerror="true">
      <env key="HADOOP_HOME" value="${basedir}"/>
      <env key="XERCESCROOT" value="${xercescroot}"/>
      <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
      <env key="LIBRECORDIO_TEST_DIR" value="${librecordio.test.dir}"/>
      <arg value="all"/>
    </exec>
  </target>
|
|
|
|
  <!-- Copy librecordio build outputs into the distribution's lib/ directory,
       skipping test artifacts and intermediate .so/.o files, then mark the
       copied files executable. -->
  <target name="package-librecordio" depends="compile-librecordio" if="librecordio">
    <mkdir dir="${dist.dir}/lib"/>
    <copy todir="${dist.dir}/lib">
      <fileset dir="${build.librecordio}" casesensitive="yes" followsymlinks="false">
        <exclude name="**/tests/**"/>
        <exclude name="*.so"/>
        <exclude name="*.o"/>
      </fileset>
    </copy>
    <chmod perm="ugo+x" type="file">
      <fileset dir="${dist.dir}/lib"/>
    </chmod>
  </target>
|
|
|
|
  <!-- Set need.c++.*.configure flags for each native component whose
       autoconf "configure" script has not been generated yet. -->
  <target name="check-c++-configure" depends="init" if="compile.c++">
    <condition property="need.c++.utils.configure">
      <not> <available file="${c++.utils.src}/configure"/> </not>
    </condition>
    <condition property="need.c++.pipes.configure">
      <not> <available file="${c++.pipes.src}/configure"/> </not>
    </condition>
    <condition property="need.c++.examples.pipes.configure">
      <not> <available file="${c++.examples.pipes.src}/configure"/> </not>
    </condition>
    <condition property="need.c++.task-controller.configure">
      <not> <available file="${c++.task-controller.src}/configure"/> </not>
    </condition>
  </target>
|
|
|
|
  <!-- Generate the c++ utils configure script with autoreconf
       (-i installs missing aux files, -f forces regeneration). -->
  <target name="create-c++-utils-configure" depends="check-c++-configure"
          if="need.c++.utils.configure">
    <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
          failonerror="yes">
      <arg value="-i"/>
      <arg value="-f"/>
    </exec>
  </target>
|
|
|
|
  <!-- Generate the c++ pipes configure script with autoreconf. -->
  <target name="create-c++-pipes-configure" depends="check-c++-configure"
          if="need.c++.pipes.configure">
    <exec executable="autoreconf" dir="${c++.pipes.src}" searchpath="yes"
          failonerror="yes">
      <arg value="-i"/>
      <arg value="-f"/>
    </exec>
  </target>
|
|
|
|
  <!-- Generate the pipes examples configure script with autoreconf. -->
  <target name="create-c++-examples-pipes-configure" depends="check-c++-configure"
          if="need.c++.examples.pipes.configure">
    <exec executable="autoreconf" dir="${c++.examples.pipes.src}"
          searchpath="yes" failonerror="yes">
      <arg value="-i"/>
      <arg value="-f"/>
    </exec>
  </target>
|
|
|
|
  <!-- Generate the task-controller configure script with autoreconf. -->
  <target name="create-c++-task-controller-configure" depends="check-c++-configure"
          if="need.c++.task-controller.configure">
    <exec executable="autoreconf" dir="${c++.task-controller.src}"
          searchpath="yes" failonerror="yes">
      <arg value="-i"/>
      <arg value="-f"/>
    </exec>
  </target>
|
|
|
|
  <!-- Umbrella target: (re)generate every native configure script.
       Empty body; all work happens in the dependencies. -->
  <target name="create-c++-configure" depends="create-c++-utils-configure,
                                               create-c++-pipes-configure,
                                               create-c++-examples-pipes-configure,
                                               create-c++-task-controller-configure"
          if="compile.c++">
  </target>
|
|
|
|
  <!-- Set need.c++.*.makefile flags for each native component whose build
       directory has no generated Makefile yet. -->
  <target name="check-c++-makefiles" depends="init" if="compile.c++">
    <condition property="need.c++.utils.makefile">
      <not> <available file="${build.c++.utils}/Makefile"/> </not>
    </condition>
    <condition property="need.c++.pipes.makefile">
      <not> <available file="${build.c++.pipes}/Makefile"/> </not>
    </condition>
    <condition property="need.c++.examples.pipes.makefile">
      <not> <available file="${build.c++.examples.pipes}/Makefile"/> </not>
    </condition>
  </target>
|
|
|
|
  <!-- Out-of-tree configure run for c++ utils: ensure the configure script
       exists and is executable, then run it in the build dir. -->
  <target name="create-c++-utils-makefile" depends="check-c++-makefiles"
          if="need.c++.utils.makefile">
    <antcall target="create-c++-utils-configure"/>
    <mkdir dir="${build.c++.utils}"/>
    <chmod file="${c++.utils.src}/configure" perm="ugo+x"/>
    <exec executable="${c++.utils.src}/configure" dir="${build.c++.utils}"
          failonerror="yes">
      <arg value="--prefix=${install.c++}"/>
    </exec>
  </target>
|
|
|
|
  <!-- Build and install the c++ utils library ("make install"). -->
  <target name="compile-c++-utils" depends="create-c++-utils-makefile"
          if="compile.c++">
    <exec executable="${make.cmd}" dir="${build.c++.utils}" searchpath="yes"
          failonerror="yes">
      <arg value="install"/>
    </exec>
  </target>
|
|
|
|
  <!-- Out-of-tree configure run for c++ pipes (same pattern as utils). -->
  <target name="create-c++-pipes-makefile" depends="check-c++-makefiles"
          if="need.c++.pipes.makefile">
    <antcall target="create-c++-pipes-configure"/>
    <mkdir dir="${build.c++.pipes}"/>
    <chmod file="${c++.pipes.src}/configure" perm="ugo+x"/>
    <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}"
          failonerror="yes">
      <arg value="--prefix=${install.c++}"/>
    </exec>
  </target>
|
|
|
|
  <!-- Build and install c++ pipes; utils must be installed first since
       pipes links against it. -->
  <target name="compile-c++-pipes"
          depends="create-c++-pipes-makefile,compile-c++-utils"
          if="compile.c++">
    <exec executable="${make.cmd}" dir="${build.c++.pipes}" searchpath="yes"
          failonerror="yes">
      <arg value="install"/>
    </exec>
  </target>
|
|
|
|
  <!-- Alias: building pipes transitively builds all native c++ code. -->
  <target name="compile-c++"
          depends="compile-c++-pipes"/>
|
|
|
|
  <!-- Out-of-tree configure run for the pipes examples; points configure at
       the already-installed hadoop utils/pipes libraries. -->
  <target name="create-c++-examples-pipes-makefile"
          depends="check-c++-makefiles"
          if="need.c++.examples.pipes.makefile">
    <antcall target="create-c++-examples-pipes-configure"/>
    <mkdir dir="${build.c++.examples.pipes}"/>
    <chmod file="${c++.examples.pipes.src}/configure" perm="ugo+x"/>
    <exec executable="${c++.examples.pipes.src}/configure"
          dir="${build.c++.examples.pipes}"
          failonerror="yes">
      <arg value="--prefix=${install.c++.examples}"/>
      <arg value="--with-hadoop-utils=${install.c++}"/>
      <arg value="--with-hadoop-pipes=${install.c++}"/>
    </exec>
  </target>
|
|
|
|
  <!-- Build and install the pipes example programs. -->
  <target name="compile-c++-examples-pipes"
          depends="create-c++-examples-pipes-makefile,compile-c++-pipes"
          if="compile.c++">
    <exec executable="${make.cmd}" dir="${build.c++.examples.pipes}" searchpath="yes"
          failonerror="yes">
      <arg value="install"/>
    </exec>
  </target>
|
|
|
|
  <!-- Alias: currently only pipes examples exist. -->
  <target name="compile-c++-examples"
          depends="compile-c++-examples-pipes"/>
|
|
|
|
<target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover. To use, specify -Dclover.home=<base of clover installation> -Drun.clover=true on the command line."/>
|
|
|
|
  <!-- Initialize the Clover coverage database and register the source/test
       trees to instrument. Runs only when clover.enabled is set. -->
  <target name="clover.setup" if="clover.enabled">
    <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
    <mkdir dir="${clover.db.dir}"/>
    <clover-setup initString="${clover.db.dir}/hadoop_coverage.db">
      <fileset dir="${src.dir}" includes="tools/**/* java/**/*"/>
      <testsources dir="${test.src.dir}" />
    </clover-setup>
  </target>
|
|
|
|
  <!-- Informational fallback: printed when Clover is not on the classpath. -->
  <target name="clover.info" unless="clover.present">
    <echo>
      Clover not found. Code coverage reports disabled.
    </echo>
  </target>
|
|
|
|
<target name="clover.check">
|
|
<fail unless="clover.present">
|
|
##################################################################
|
|
Clover not found.
|
|
Please specify -Dclover.home=<base of clover installation>
|
|
on the command line.
|
|
##################################################################
|
|
</fail>
|
|
</target>
|
|
|
|
  <!-- Produce both an HTML report (into the report dir) and an XML report
       (clover.xml) from the coverage database. -->
  <target name="generate-clover-reports" depends="clover.check, clover">
    <mkdir dir="${clover.report.dir}"/>
    <clover-report>
      <current outfile="${clover.report.dir}" title="${final.name}">
        <format type="html"/>
      </current>
    </clover-report>
    <clover-report>
      <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
        <format type="xml"/>
      </current>
    </clover-report>
  </target>
|
|
|
|
<target name="findbugs.check" depends="check-for-findbugs" unless="findbugs.present">
|
|
<fail message="'findbugs.home' is not defined. Please pass -Dfindbugs.home=<base of Findbugs installation> to Ant on the command-line." />
|
|
</target>
|
|
|
|
<target name="patch.check" unless="patch.file">
|
|
<fail message="'patch.file' is not defined. Please pass -Dpatch.file=<location of patch file> to Ant on the command-line." />
|
|
</target>
|
|
|
|
  <!-- Run the DEVELOPER mode of test-patch.sh against a local patch.
       The argument order is positional and must match the script exactly. -->
  <target name="test-patch" depends="patch.check,findbugs.check,forrest.check">
    <exec executable="bash" failonerror="true">
      <arg value="${basedir}/src/test/bin/test-patch.sh"/>
      <arg value="DEVELOPER"/>
      <arg value="${patch.file}"/>
      <arg value="${scratch.dir}"/>
      <arg value="${svn.cmd}"/>
      <arg value="${grep.cmd}"/>
      <arg value="${patch.cmd}"/>
      <arg value="${findbugs.home}"/>
      <arg value="${forrest.home}"/>
      <arg value="${basedir}"/>
    </exec>
  </target>
|
|
|
|
  <!-- CI (HUDSON) mode of test-patch.sh: the patch is fetched from JIRA by
       the script itself, hence the extra wget/jira arguments. Argument order
       is positional and must match the script exactly. -->
  <target name="hudson-test-patch" depends="findbugs.check,forrest.check">
    <exec executable="bash" failonerror="true">
      <arg value="${basedir}/src/test/bin/test-patch.sh"/>
      <arg value="HUDSON"/>
      <arg value="${scratch.dir}"/>
      <arg value="${support.dir}"/>
      <arg value="${ps.cmd}"/>
      <arg value="${wget.cmd}"/>
      <arg value="${jiracli.cmd}"/>
      <arg value="${svn.cmd}"/>
      <arg value="${grep.cmd}"/>
      <arg value="${patch.cmd}"/>
      <arg value="${findbugs.home}"/>
      <arg value="${forrest.home}"/>
      <arg value="${eclipse.home}"/>
      <arg value="${basedir}"/>
      <arg value="${jira.passwd}"/>
      <arg value="${curl.cmd}"/>
      <arg value="${defect}"/>
    </exec>
  </target>
|
|
|
|
  <!-- Evaluated at parse time: lets ant-eclipse-download skip the fetch when
       the jar is already present. -->
  <condition property="ant-eclipse.jar.exists">
    <available file="${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar"/>
  </condition>
|
|
|
|
<target name="ant-eclipse-download" unless="ant-eclipse.jar.exists"
|
|
description="Downloads the ant-eclipse binary.">
|
|
<get src="http://downloads.sourceforge.net/project/ant-eclipse/ant-eclipse/1.0/ant-eclipse-1.0.bin.tar.bz2"
|
|
dest="${build.dir}/ant-eclipse-1.0.bin.tar.bz2" usetimestamp="false" />
|
|
|
|
<untar src="${build.dir}/ant-eclipse-1.0.bin.tar.bz2"
|
|
dest="${build.dir}" compression="bzip2">
|
|
<patternset>
|
|
<include name="lib/ant-eclipse-1.0-jvm1.2.jar"/>
|
|
</patternset>
|
|
</untar>
|
|
<delete file="${build.dir}/java/ant-eclipse-1.0.bin.tar.bz2" />
|
|
</target>
|
|
|
|
  <target name="eclipse"
          depends="init,ant-eclipse-download,ivy-retrieve-common,ivy-retrieve-mapred,ivy-retrieve-test"
          description="Create eclipse project files">
    <!-- Derive the Eclipse project name from the last path component of
         ${basedir} ($$ is Ant's escape for a literal $ in the regex). -->
    <pathconvert property="eclipse.project">
      <path path="${basedir}"/>
      <regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
    </pathconvert>
    <taskdef name="eclipse"
             classname="prantl.ant.eclipse.EclipseTask"
             classpath="${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar" />
    <!-- Generate .project/.classpath: one source entry per source root
         (core, generated, tests, examples, tools, each contrib module),
         plus the ivy classpaths as libraries. -->
    <eclipse updatealways="true">
      <project name="${eclipse.project}" />
      <classpath>
        <source path="${java.src.dir}"
                output="${build.dir.eclipse-main-classes}" />
        <source path="${build.src}"
                output="${build.dir.eclipse-main-generated-classes}" />
        <source path="${test.src.dir}/mapred"
                output="${build.dir.eclipse-test-classes}" />
        <source path="${test.src.dir}/aop"
                output="${build.dir.eclipse-test-classes}" />
        <source path="${test.src.dir}/unit"
                output="${build.dir.eclipse-test-classes}" />
        <source path="${examples.dir}"
                output="${build.dir.eclipse-example-classes}" />
        <source path="${tools.src}"
                output="${build.dir.eclipse-tools-classes}" />
        <source path="${contrib.dir}/block_forensics/src/java"
                output="${build.dir.eclipse-contrib-classes}/block_forensics/main" />
        <source path="${contrib.dir}/capacity-scheduler/src/java"
                output="${build.dir.eclipse-contrib-classes}/capacity-scheduler/main" />
        <source path="${contrib.dir}/capacity-scheduler/src/test"
                output="${build.dir.eclipse-contrib-classes}/capacity-scheduler/test" />
        <source path="${contrib.dir}/data_join/src/java"
                output="${build.dir.eclipse-contrib-classes}/data_join/main" />
        <source path="${contrib.dir}/data_join/src/examples"
                output="${build.dir.eclipse-contrib-classes}/data_join/examples" />
        <source path="${contrib.dir}/data_join/src/test"
                output="${build.dir.eclipse-contrib-classes}/data_join/test" />
        <source path="${contrib.dir}/dynamic-scheduler/src/java"
                output="${build.dir.eclipse-contrib-classes}/dynamic-scheduler/main" />
        <source path="${contrib.dir}/dynamic-scheduler/src/test"
                output="${build.dir.eclipse-contrib-classes}/dynamic-scheduler/test" />
        <source path="${contrib.dir}/fairscheduler/src/java"
                output="${build.dir.eclipse-contrib-classes}/fairscheduler/main" />
        <source path="${contrib.dir}/fairscheduler/src/test"
                output="${build.dir.eclipse-contrib-classes}/fairscheduler/test" />
        <source path="${contrib.dir}/gridmix/src/java"
                output="${build.dir.eclipse-contrib-classes}/gridmix/main" />
        <source path="${contrib.dir}/gridmix/src/test"
                output="${build.dir.eclipse-contrib-classes}/gridmix/test" />
        <source path="${contrib.dir}/mumak/src/java"
                output="${build.dir.eclipse-contrib-classes}/mumak/main" />
        <source path="${contrib.dir}/mumak/src/test"
                output="${build.dir.eclipse-contrib-classes}/mumak/test" />
        <source path="${contrib.dir}/raid/src/java"
                output="${build.dir.eclipse-contrib-classes}/raid/main" />
        <source path="${contrib.dir}/raid/src/test"
                output="${build.dir.eclipse-contrib-classes}/raid/test" />
        <source path="${contrib.dir}/streaming/src/java"
                output="${build.dir.eclipse-contrib-classes}/streaming/main" />
        <source path="${contrib.dir}/streaming/src/test"
                output="${build.dir.eclipse-contrib-classes}/streaming/test" />
        <source path="${contrib.dir}/vaidya/src/java"
                output="${build.dir.eclipse-contrib-classes}/vaidya/main" />
        <source path="${contrib.dir}/vertica/src/java"
                output="${build.dir.eclipse-contrib-classes}/vertica/main" />
        <source path="${contrib.dir}/vertica/src/test"
                output="${build.dir.eclipse-contrib-classes}/vertica/test" />
        <output path="${build.dir.eclipse-main-classes}" />
        <library pathref="ivy-common.classpath" exported="true" />
        <library pathref="ivy-mapred.classpath" exported="true" />
        <library pathref="ivy-test.classpath" exported="false" />
        <library path="${conf.dir}" exported="false" />
      </classpath>
    </eclipse>
    <!-- Copy template dotfiles into the project root, substituting the
         @PROJECT@ token with the derived project name. -->
    <copy todir="." overwrite="true">
      <fileset dir=".eclipse.templates">
        <exclude name="**/README.txt"/>
      </fileset>
      <filterset>
        <filter token="PROJECT" value="${eclipse.project}"/>
      </filterset>
    </copy>
  </target>
|
|
|
|
<target name="ivy-init-dirs">
|
|
<mkdir dir="${build.ivy.dir}" />
|
|
<mkdir dir="${build.ivy.lib.dir}" />
|
|
<mkdir dir="${build.ivy.report.dir}" />
|
|
<mkdir dir="${build.ivy.maven.dir}" />
|
|
</target>
|
|
|
|
<target name="ivy-probe-antlib" >
|
|
<condition property="ivy.found">
|
|
<typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
|
|
</condition>
|
|
</target>
|
|
|
|
<target name="ivy-download" description="To download ivy" unless="offline">
|
|
<get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
|
|
</target>
|
|
|
|
<!--
|
|
To avoid Ivy leaking things across big projects, always load Ivy in the same classloader.
|
|
Also note how we skip loading Ivy if it is already there, just to make sure all is well.
|
|
-->
|
|
<target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found">
|
|
<typedef uri="antlib:org.apache.ivy.ant" onerror="fail"
|
|
loaderRef="ivyLoader">
|
|
<classpath>
|
|
<pathelement location="${ivy.jar}"/>
|
|
</classpath>
|
|
</typedef>
|
|
<fail >
|
|
<condition >
|
|
<not>
|
|
<typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
|
|
</not>
|
|
</condition>
|
|
You need Apache Ivy 2.0 or later from http://ant.apache.org/
|
|
It could not be loaded from ${ivy_repo_url}
|
|
</fail>
|
|
</target>
|
|
|
|
|
|
<property name="ivyresolvelog" value="download-only"/>
|
|
<property name="ivyretrievelog" value="quiet"/>
|
|
|
|
<target name="ivy-init" depends="ivy-init-antlib" >
|
|
|
|
<!--Configure Ivy by reading in the settings file
|
|
If anyone has already read in a settings file into this settings ID, it gets priority
|
|
-->
|
|
<ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve-javadoc" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve-releaseaudit" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve-test" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve-common" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve-package" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="package"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve-mapred" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="mapred"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve-jdiff" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve-checkstyle" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-resolve-system" depends="ivy-init">
|
|
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="system"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve" depends="ivy-resolve"
|
|
description="Retrieve Ivy-managed artifacts">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyresolvelog}"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle"
|
|
description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyresolvelog}"/>
|
|
<ivy:cachepath pathid="checkstyle-classpath" conf="checkstyle"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve-jdiff" depends="ivy-resolve-jdiff"
|
|
description="Retrieve Ivy-managed artifacts for the javadoc configurations">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyresolvelog}"/>
|
|
<ivy:cachepath pathid="jdiff-classpath" conf="jdiff"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve-javadoc" depends="ivy-resolve-javadoc"
|
|
description="Retrieve Ivy-managed artifacts for the javadoc configurations">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyresolvelog}"/>
|
|
<ivy:cachepath pathid="javadoc-classpath" conf="javadoc"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve-test" depends="ivy-resolve-test"
|
|
description="Retrieve Ivy-managed artifacts for the test configurations">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyresolvelog}"/>
|
|
<ivy:cachepath pathid="ivy-test.classpath" conf="test"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve-common" depends="ivy-resolve-common"
|
|
description="Retrieve Ivy-managed artifacts for the compile configurations">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyresolvelog}"/>
|
|
<ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve-package" depends="ivy-resolve-package"
|
|
description="Retrieve Ivy-managed artifacts for the package configurations">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyretrievelog}"/>
|
|
<ivy:cachepath pathid="ivy-package.classpath" conf="package"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve-mapred" depends="ivy-resolve-mapred"
|
|
description="Retrieve Ivy-managed artifacts for the mapred configurations">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyresolvelog}"/>
|
|
<ivy:cachepath pathid="ivy-mapred.classpath" conf="mapred"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit"
|
|
description="Retrieve Ivy-managed artifacts for the compile configurations">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyresolvelog}"/>
|
|
<ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
|
|
</target>
|
|
|
|
<target name="ivy-retrieve-system" depends="ivy-resolve-system"
|
|
description="Retrieve Ivy-managed artifacts for the system tests">
|
|
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
|
|
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
|
|
log="${ivyresolvelog}"/>
|
|
<ivy:cachepath pathid="ivy-system.classpath" conf="system"/>
|
|
</target>
|
|
|
|
<target name="ivy-report" depends="ivy-resolve-releaseaudit"
|
|
description="Generate">
|
|
<ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
|
|
<echo>
|
|
Reports generated:${build.ivy.report.dir}
|
|
</echo>
|
|
</target>
|
|
|
|
<!-- taskcontroller targets -->
|
|
<target name="init-task-controller-build">
|
|
<antcall target="create-c++-task-controller-configure"/>
|
|
<mkdir dir="${build.c++.task-controller}" />
|
|
<copy todir="${build.c++.task-controller}">
|
|
<fileset dir="${c++.task-controller.src}" includes="*.c"/>
|
|
<fileset dir="${c++.task-controller.src}" includes="*.h"/>
|
|
</copy>
|
|
<chmod file="${c++.task-controller.src}/configure" perm="ugo+x"/>
|
|
<condition property="task-controller.conf.dir.passed">
|
|
<not>
|
|
<equals arg1="${hadoop.conf.dir}" arg2="$${hadoop.conf.dir}"/>
|
|
</not>
|
|
</condition>
|
|
</target>
|
|
<target name="configure-task-controller" depends="init,
|
|
init-task-controller-build,
|
|
task-controller-configuration-with-confdir,
|
|
task-controller-configuration-with-no-confdir">
|
|
</target>
|
|
<target name="task-controller-configuration-with-confdir"
|
|
if="task-controller.conf.dir.passed" >
|
|
<exec executable="${c++.task-controller.src}/configure"
|
|
dir="${build.c++.task-controller}" failonerror="yes">
|
|
<arg value="--prefix=${task-controller.install.dir}" />
|
|
<arg value="--with-confdir=${hadoop.conf.dir}" />
|
|
</exec>
|
|
</target>
|
|
<target name="task-controller-configuration-with-no-confdir"
|
|
unless="task-controller.conf.dir.passed">
|
|
<exec executable="${c++.task-controller.src}/configure"
|
|
dir="${build.c++.task-controller}" failonerror="yes">
|
|
<arg value="--prefix=${task-controller.install.dir}" />
|
|
</exec>
|
|
</target>
|
|
<!--
|
|
* Create the installation directory.
|
|
* Do a make install.
|
|
-->
|
|
<target name="task-controller" depends="configure-task-controller">
|
|
<mkdir dir="${task-controller.install.dir}" />
|
|
<exec executable="${make.cmd}" dir="${build.c++.task-controller}"
|
|
searchpath="yes" failonerror="yes">
|
|
<arg value="install" />
|
|
</exec>
|
|
</target>
|
|
<target name="test-task-controller" depends="task-controller">
|
|
<copy todir="${build.c++.task-controller}" verbose="true">
|
|
<fileset dir="${c++.task-controller.src}" includes="tests/"/>
|
|
</copy>
|
|
<exec executable="${make.cmd}" dir="${build.c++.task-controller}"
|
|
searchpath="yes" failonerror="yes">
|
|
<arg value="clean" />
|
|
<arg value="test" />
|
|
</exec>
|
|
<exec executable="${build.c++.task-controller}/tests/test-task-controller"
|
|
dir="${build.c++.task-controller}/tests/"
|
|
failonerror="yes">
|
|
</exec>
|
|
</target>
|
|
<!-- end of task-controller targets -->
|
|
|
|
<!-- Begining of fault-injection targets-->
|
|
<import file="${test.src.dir}/aop/build/aop.xml"/>
|
|
|
|
<!-- declaring mapred.src.dir as java.src.dir for aop.xml -->
|
|
<property name="java.src.dir" value="${src.dir}/java"/>
|
|
|
|
<!-- target dependency from aop.xml -->
|
|
<target name="-classes-compilation"
|
|
depends="compile-mapred-classes, compile-mapred-test"/>
|
|
|
|
<target name="jar-test-fault-inject" depends="jar-mapred-test-fault-inject"
|
|
description="Make hadoop-mapred-test-fi.jar files"/>
|
|
|
|
<!-- target to build test-fi.jar-->
|
|
<target name="jar-mapred-test-fault-inject" depends="injectfaults"
|
|
description="Make hadoop-mapred-test-fi.jar">
|
|
<macro-jar-test-fault-inject target.name="jar-test"
|
|
jar.final.name="test.final.name"
|
|
jar.final.value="${name}-test-${version}-fi"/>
|
|
</target>
|
|
|
|
<!-- target to build the hadoop-fi.jar -->
|
|
<target name="jar-fault-inject" depends="injectfaults"
|
|
description="Make hadoop-fi.jar">
|
|
<macro-jar-fault-inject
|
|
target.name="jar"
|
|
build.dir="${build-fi.dir}"
|
|
jar.final.name="final.name"
|
|
jar.final.value="${final.name}-fi" />
|
|
</target>
|
|
|
|
<!-- target to run fault injected test cases will run entire mapred test
|
|
suite-->
|
|
<target name="run-test-mapred-fault-inject" depends="injectfaults"
|
|
description="Run full suite of unit tests with fault injection">
|
|
<macro-run-tests-fault-inject target.name="run-test-mapred"
|
|
testcasesonly="false"/>
|
|
</target>
|
|
|
|
<!-- target to run non-FI tests in a FI environment-->
|
|
<target name="run-fault-inject-with-testcaseonly" depends="injectfaults">
|
|
<fail unless="testcase">
|
|
Can't run this target without -Dtestcase setting!
|
|
</fail>
|
|
<macro-run-tests-fault-inject target.name="run-test-mapred"
|
|
testcasesonly="true"/>
|
|
</target>
|
|
<condition property="tests.notestcase">
|
|
<and>
|
|
<isfalse value="${test.fault.inject}"/>
|
|
<not>
|
|
<isset property="testcase"/>
|
|
</not>
|
|
</and>
|
|
</condition>
|
|
<condition property="tests.notestcase.fi">
|
|
<and>
|
|
<not>
|
|
<isset property="testcase"/>
|
|
</not>
|
|
<istrue value="${test.fault.inject}"/>
|
|
</and>
|
|
</condition>
|
|
<condition property="test.testcase">
|
|
<and>
|
|
<isfalse value="${test.fault.inject}"/>
|
|
<isset property="testcase"/>
|
|
</and>
|
|
</condition>
|
|
<condition property="tests.testcaseonly.fi">
|
|
<istrue value="${special.fi.testcasesonly}" />
|
|
</condition>
|
|
<condition property="tests.testcase.fi">
|
|
<and>
|
|
<istrue value="${test.fault.inject}" />
|
|
<isset property="testcase" />
|
|
<isfalse value="${special.fi.testcasesonly}" />
|
|
</and>
|
|
</condition>
|
|
<!-- End of fault injection targets-->
|
|
|
|
</project>
|