Merge trunk into HDFS-1623 post-mavenization.

- added CHANGES.HDFS-1623.txt to findbugs exclude
- added jsch dependency


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1159782 13f79535-47bb-0310-9956-ffa450edef68
commit b0ea4b71c9
Todd Lipcon, 2011-08-19 20:47:40 +00:00
3,672 changed files with 141,966 additions and 3,771 deletions

.gitignore (2 changes)

@ -4,4 +4,6 @@
.idea
.svn
.classpath
.project
.settings
target

View File

@ -1,3 +1,6 @@
Build instructions for Hadoop Common/HDFS using Maven
----------------------------------------------------------------------------------
Requirements:
@ -16,7 +19,8 @@ Maven modules:
- hadoop-project (Parent POM for all Hadoop Maven modules. )
(All plugins & dependencies versions are defined here.)
- hadoop-annotations (Generates the Hadoop doclet used to generate the Javadocs)
-  - hadoop-common (Hadoop common)
+  - hadoop-common (Hadoop Common)
- hadoop-hdfs (Hadoop HDFS)
----------------------------------------------------------------------------------
Where to run Maven from?
@ -57,5 +61,4 @@ Maven build goals:
* -Dtest.exclude=<TESTCLASSNAME>
* -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
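 Example (test class names illustrative):
   mvn test -Dtest.exclude.pattern=**/TestFoo.java,**/TestBar.java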
----------------------------------------------------------------------------------

View File

@ -59,15 +59,15 @@ PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
if [[ -d hadoop-common ]]; then
echo Looks like this is being run at project root
-# if all of the lines start with hadoop-common/, hdfs/, or mapreduce/, this is
+# if all of the lines start with hadoop-common/, hadoop-hdfs/, or mapreduce/, this is
# relative to the hadoop root instead of the subproject root, so we need
# to chop off another layer
-elif [[ "$PREFIX_DIRS" =~ ^(hdfs|hadoop-common|mapreduce)$ ]]; then
+elif [[ "$PREFIX_DIRS" =~ ^(hadoop-hdfs|hadoop-common|mapreduce)$ ]]; then
echo Looks like this is relative to project root. Increasing PLEVEL
PLEVEL=$[$PLEVEL + 1]
-elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hdfs\|mapreduce' ; then
+elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hadoop-hdfs\|mapreduce' ; then
echo Looks like this is a cross-subproject patch. Try applying from the project root
exit 1
fi

View File

@ -24,9 +24,9 @@
<fileSet>
<directory>${basedir}/src/main/bin</directory>
<outputDirectory>/bin</outputDirectory>
-      <includes>
-        <include>hadoop</include>
-      </includes>
+      <excludes>
+        <exclude>*.sh</exclude>
+      </excludes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
@ -37,7 +37,7 @@
<directory>${basedir}/src/main/bin</directory>
<outputDirectory>/libexec</outputDirectory>
<includes>
-        <include>hadoop-config.sh</include>
+        <include>*-config.sh</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
@ -68,7 +68,7 @@
</includes>
</fileSet>
<fileSet>
-      <directory>${basedir}/src/main/webapps</directory>
+      <directory>${project.build.directory}/webapps</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/webapps</outputDirectory>
</fileSet>
<fileSet>
@ -101,7 +101,7 @@
<dependencySet>
<outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
<unpack>false</unpack>
-      <scope>compile</scope>
+      <scope>runtime</scope>
<useProjectArtifact>false</useProjectArtifact>
<excludes>
<exclude>org.apache.ant:*:jar</exclude>

View File

@ -40,7 +40,7 @@
<directory>${basedir}/src/main/bin</directory>
<outputDirectory>/libexec</outputDirectory>
<includes>
-        <include>hadoop-config.sh</include>
+        <include>*-config.sh</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
@ -51,6 +51,16 @@
<fileSet>
<directory>${basedir}/src/main/webapps</directory>
<outputDirectory>/webapps</outputDirectory>
<excludes>
<exclude>proto-*-web.xml</exclude>
</excludes>
</fileSet>
<fileSet>
<directory>${project.build.directory}/webapps</directory>
<outputDirectory>/webapps</outputDirectory>
<excludes>
<exclude>proto-*-web.xml</exclude>
</excludes>
</fileSet>
<fileSet>
<directory>${project.build.directory}/site</directory>
@ -73,11 +83,11 @@
<dependencySet>
<outputDirectory>/lib</outputDirectory>
<unpack>false</unpack>
-      <scope>compile</scope>
+      <scope>runtime</scope>
<useProjectArtifact>false</useProjectArtifact>
<excludes>
<exclude>org.apache.ant:*:jar</exclude>
-        <exclude>org.apache.hadoop:hadoop-*:jar</exclude>
+        <exclude>org.apache.hadoop:hadoop-*:*:*:*</exclude>
<exclude>jdiff:jdiff:jar</exclude>
</excludes>
</dependencySet>

View File

@ -324,6 +324,13 @@ Trunk (unreleased changes)
HADOOP-7531. Add servlet util methods for handling paths in requests. (eli)
HADOOP-7493. Add ShortWritable. (Uma Maheswara Rao G via szetszwo)
HADOOP-7555. Add eclipse-generated files to .gitignore. (atm)
HADOOP-7264. Bump avro version to at least 1.4.1. (Alejandro Abdelnur via
tomwhite)
OPTIMIZATIONS
HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@ -498,6 +505,9 @@ Trunk (unreleased changes)
HADOOP-7545. Common -tests JAR should not include properties and configs.
(todd)
HADOOP-7536. Correct the dependency version regressions introduced in
HADOOP-6671. (Alejandro Abdelnur via tomwhite)
Release 0.22.0 - Unreleased
INCOMPATIBLE CHANGES

View File

@ -16,9 +16,9 @@
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-project</artifactId>
+    <artifactId>hadoop-project-distro</artifactId>
<version>0.23.0-SNAPSHOT</version>
-    <relativePath>../hadoop-project</relativePath>
+    <relativePath>../hadoop-project-distro</relativePath>
</parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
@ -28,18 +28,12 @@
<packaging>jar</packaging>
<properties>
<test.build.data>${project.build.directory}/test/data</test.build.data>
<hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
<test.build.webapps>${project.build.directory}/test-classes/webapps</test.build.webapps>
<test.cache.data>${project.build.directory}/test-classes</test.cache.data>
<test.build.classes>${project.build.directory}/test-classes</test.build.classes>
<build.platform>${os.name}-${os.arch}-${sun.arch.data.model}</build.platform>
<snappy.prefix>/usr/local</snappy.prefix>
<snappy.lib>${snappy.prefix}/lib</snappy.lib>
<bundle.snappy>false</bundle.snappy>
<hadoop.component>common</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
@ -219,10 +213,15 @@
<scope>test</scope>
</dependency>
<dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro-ipc</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>net.sf.kosmosfs</groupId>
<artifactId>kfs</artifactId>
@ -243,83 +242,20 @@
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<forkMode>always</forkMode>
<forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
<argLine>-Xmx1024m</argLine>
<environmentVariables>
<LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib</LD_LIBRARY_PATH>
</environmentVariables>
<systemPropertyVariables>
<!-- TODO: all references in testcases should be updated to this default -->
<test.build.data>${test.build.data}</test.build.data>
<test.build.webapps>${test.build.webapps}</test.build.webapps>
<test.cache.data>${test.cache.data}</test.cache.data>
<hadoop.log.dir>${hadoop.log.dir}</hadoop.log.dir>
<test.build.classes>${test.build.classes}</test.build.classes>
<java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
<java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
</systemPropertyVariables>
<includes>
<include>**/Test*.java</include>
</includes>
<excludes>
<exclude>**/${test.exclude}.java</exclude>
<exclude>${test.exclude.pattern}</exclude>
<exclude>**/Test*$*.java</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
<executions>
<execution>
<id>prepare-jar</id>
<phase>prepare-package</phase>
<id>generate-avro-test-sources</id>
<phase>generate-test-sources</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
<execution>
<id>prepare-test-jar</id>
<phase>prepare-package</phase>
<goals>
<goal>test-jar</goal>
</goals>
<configuration>
<includes>
<include>**/*.class</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<phase>prepare-package</phase>
<goals>
<goal>jar</goal>
<goal>test-jar</goal>
<goal>schema</goal>
<goal>protocol</goal>
</goals>
</execution>
</executions>
<configuration>
<attach>true</attach>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<configuration>
<excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
<testOutputDirectory>${project.build.directory}/generated-test-sources/java</testOutputDirectory>
</configuration>
</plugin>
<plugin>
@ -359,24 +295,6 @@
<recordcc destdir="${project.build.directory}/generated-test-sources/java">
<fileset dir="${basedir}/src/test/ddl" includes="**/*.jr"/>
</recordcc>
<taskdef name="schema" classname="org.apache.avro.specific.SchemaTask">
<classpath refid="maven.test.classpath"/>
</taskdef>
<schema destdir="${project.build.directory}/generated-test-sources/java">
<fileset dir="${basedir}/src/test">
<include name="**/*.avsc"/>
</fileset>
</schema>
<taskdef name="schema" classname="org.apache.avro.specific.ProtocolTask">
<classpath refid="maven.test.classpath"/>
</taskdef>
<schema destdir="${project.build.directory}/generated-test-sources/java">
<fileset dir="${basedir}/src/test">
<include name="**/*.avpr"/>
</fileset>
</schema>
</target>
</configuration>
</execution>
@ -433,17 +351,6 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<configuration>
<configLocation>file://${basedir}/dev-support/checkstyle.xml</configLocation>
<failOnViolation>false</failOnViolation>
<format>xml</format>
<format>html</format>
<outputFile>${project.build.directory}/test/checkstyle-errors.xml</outputFile>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
@ -463,43 +370,6 @@
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<linksource>true</linksource>
<quiet>true</quiet>
<verbose>false</verbose>
<source>${maven.compile.source}</source>
<charset>${maven.compile.encoding}</charset>
<reportOutputDirectory>${project.build.directory}/site</reportOutputDirectory>
<destDir>api</destDir>
<groups>
<group>
<title>${project.name} API</title>
<packages>org.apache.hadoop*</packages>
</group>
</groups>
<doclet>org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsStandardDoclet</doclet>
<docletArtifacts>
<docletArtifact>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${project.version}</version>
</docletArtifact>
</docletArtifacts>
<useStandardDocletOptions>true</useStandardDocletOptions>
<!-- switch on dependency-driven aggregation -->
<includeDependencySources>true</includeDependencySources>
<dependencySourceIncludes>
<!-- include ONLY dependencies I control -->
<dependencySourceInclude>org.apache.hadoop:hadoop-annotations</dependencySourceInclude>
</dependencySourceIncludes>
</configuration>
</plugin>
</plugins>
</build>
@ -636,427 +506,5 @@
</plugins>
</build>
</profile>
<profile>
<id>docs</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<properties>
<jdiff.stable.api>0.20.2</jdiff.stable.api>
<jdiff.stability>-unstable</jdiff.stability>
<jdiff.compatibility></jdiff.compatibility>
<jdiff.javadoc.maxmemory>512m</jdiff.javadoc.maxmemory>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>javadoc</goal>
</goals>
<phase>prepare-package</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>findbugs</goal>
</goals>
<phase>prepare-package</phase>
</execution>
</executions>
<configuration>
<excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>site</id>
<phase>prepare-package</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>jdiff</groupId>
<artifactId>jdiff</artifactId>
<version>${jdiff.version}</version>
<overWrite>false</overWrite>
<outputDirectory>${project.build.directory}</outputDirectory>
<destFileName>jdiff.jar</destFileName>
</artifactItem>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${hadoop.annotations.version}</version>
<overWrite>false</overWrite>
<outputDirectory>${project.build.directory}</outputDirectory>
<destFileName>hadoop-annotations.jar</destFileName>
</artifactItem>
</artifactItems>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>site</id>
<phase>prepare-package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}/docs-src"/>
<copy todir="${project.build.directory}/docs-src">
<fileset dir="${basedir}/src/main/docs"/>
</copy>
<!-- Docs -->
<exec dir="${project.build.directory}/docs-src"
executable="${env.FORREST_HOME}/bin/forrest"
failonerror="true">
</exec>
<copy todir="${project.build.directory}/site">
<fileset dir="${project.build.directory}/docs-src/build/site"/>
</copy>
<copy file="${project.build.directory}/docs-src/releasenotes.html"
todir="${project.build.directory}/site"/>
<style basedir="${basedir}/src/main/resources"
destdir="${project.build.directory}/site"
includes="core-default.xml"
style="${basedir}/src/main/xsl/configuration.xsl"/>
<!-- Convert 'CHANGES.txt' to 'changes.html" -->
<exec executable="perl" input="${basedir}/../CHANGES.txt"
output="${project.build.directory}/site/changes.html"
failonerror="true">
<arg value="${project.build.directory}/docs-src/changes/changes2html.pl"/>
</exec>
<copy todir="${project.build.directory}/site">
<fileset dir="${project.build.directory}/docs-src/changes" includes="*.css"/>
</copy>
<!-- Jdiff -->
<mkdir dir="${project.build.directory}/site/jdiff/xml"/>
<javadoc maxmemory="${jdiff.javadoc.maxmemory}" verbose="yes">
<doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
<param name="-apidir" value="${project.build.directory}/site/jdiff/xml"/>
<param name="-apiname" value="hadoop-core ${project.version}"/>
<param name="${jdiff.stability}"/>
</doclet>
<packageset dir="${basedir}/src/main/java"/>
<classpath>
<path refid="maven.compile.classpath"/>
</classpath>
</javadoc>
<javadoc sourcepath="${basedir}/src/main/java"
destdir="${project.build.directory}/site/jdiff/xml"
sourceFiles="${basedir}/dev-support/jdiff/Null.java"
maxmemory="${jdiff.javadoc.maxmemory}">
<doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
<param name="-oldapi" value="hadoop-core ${jdiff.stable.api}"/>
<param name="-newapi" value="hadoop-core ${project.version}"/>
<param name="-oldapidir" value="${basedir}/dev-support/jdiff"/>
<param name="-newapidir" value="${project.build.directory}/site/jdiff/xml"/>
<param name="-javadocold"
value="http://hadoop.apache.org/docs/${jdiff.stable.api}/api/"/>
<param name="-javadocnew" value="${project.build.directory}/site/api"/>
<param name="-stats"/>
<param name="${jdiff.stability}"/>
<param name="${jdiff.compatibility}"/>
</doclet>
<classpath>
<path refid="maven.compile.classpath"/>
</classpath>
</javadoc>
<xslt style="${env.FINDBUGS_HOME}/src/xsl/default.xsl"
in="${project.build.directory}/findbugsXml.xml"
out="${project.build.directory}/site/findbugs.html"/>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>src</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-assemblies</artifactId>
<version>${hadoop.assemblies.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>pre-tar-src</id>
<phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<attach>false</attach>
<finalName>${project.artifactId}-${project.version}</finalName>
<descriptorRefs>
<descriptorRef>hadoop-src</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>tar</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>pre-tar</id>
<phase>prepare-package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<!-- Using Unix script to preserve symlinks -->
<echo file="${project.build.directory}/tar-copynativelibs.sh">
which cygpath 2> /dev/null
if [ $? = 1 ]; then
BUILD_DIR="${project.build.directory}"
else
BUILD_DIR=`cygpath --unix '${project.build.directory}'`
fi
TAR='tar cf -'
UNTAR='tar xfBp -'
LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
if [ -d $${LIB_DIR} ] ; then
TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}/lib/native/${build.platform}"
mkdir -p $${TARGET_DIR}
cd $${LIB_DIR}
$$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
if [ "${bundle.snappy}" = "true" ] ; then
cd ${snappy.lib}
$$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
fi
fi
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
<arg line="./tar-copynativelibs.sh"/>
</exec>
</target>
</configuration>
</execution>
<execution>
<id>tar</id>
<phase>package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<!-- Using Unix script to preserve symlinks -->
<echo file="${project.build.directory}/tar-maketar.sh">
which cygpath 2> /dev/null
if [ $? = 1 ]; then
BUILD_DIR="${project.build.directory}"
else
BUILD_DIR=`cygpath --unix '${project.build.directory}'`
fi
cd ${BUILD_DIR}
tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
<arg line="./tar-maketar.sh"/>
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-assemblies</artifactId>
<version>${hadoop.assemblies.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>pre-tar</id>
<phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<attach>false</attach>
<finalName>${project.artifactId}-${project.version}</finalName>
<descriptorRefs>
<descriptorRef>hadoop-tar</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>bintar</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>pre-bintar</id>
<phase>prepare-package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<!-- Using Unix script to preserve symlinks -->
<echo file="${project.build.directory}/bintar-copynativelibs.sh">
which cygpath 2> /dev/null
if [ $? = 1 ]; then
BUILD_DIR="${project.build.directory}"
else
BUILD_DIR=`cygpath --unix '${project.build.directory}'`
fi
TAR='tar cf -'
UNTAR='tar xfBp -'
LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
if [ -d $${LIB_DIR} ] ; then
TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}-bin/lib"
mkdir -p $${TARGET_DIR}
cd $${LIB_DIR}
$$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
if [ "${bundle.snappy}" = "true" ] ; then
cd ${snappy.lib}
$$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
fi
fi
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
<arg line="./bintar-copynativelibs.sh"/>
</exec>
</target>
</configuration>
</execution>
<execution>
<id>bintar</id>
<phase>package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<!-- Using Unix script to preserve symlinks -->
<echo file="${project.build.directory}/bintar-maketar.sh">
which cygpath 2> /dev/null
if [ $? = 1 ]; then
BUILD_DIR="${project.build.directory}"
else
BUILD_DIR=`cygpath --unix '${project.build.directory}'`
fi
cd ${BUILD_DIR}
tar czf ${project.artifactId}-${project.version}-bin.tar.gz ${project.artifactId}-${project.version}-bin
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
<arg line="./bintar-maketar.sh"/>
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-assemblies</artifactId>
<version>${hadoop.assemblies.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>pre-bintar</id>
<phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<attach>false</attach>
<finalName>${project.artifactId}-${project.version}-bin</finalName>
<descriptorRefs>
<descriptorRef>hadoop-bintar</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@ -0,0 +1,111 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/** A WritableComparable for shorts. */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class ShortWritable implements WritableComparable<ShortWritable> {
private short value;
public ShortWritable() {
}
public ShortWritable(short value) {
set(value);
}
/** Set the value of this ShortWritable. */
public void set(short value) {
this.value = value;
}
/** Return the value of this ShortWritable. */
public short get() {
return value;
}
/** read the short value */
@Override
public void readFields(DataInput in) throws IOException {
value = in.readShort();
}
/** write short value */
@Override
public void write(DataOutput out) throws IOException {
out.writeShort(value);
}
/** Returns true iff <code>o</code> is a ShortWritable with the same value. */
@Override
public boolean equals(Object o) {
if (!(o instanceof ShortWritable))
return false;
ShortWritable other = (ShortWritable) o;
return this.value == other.value;
}
/** hash code */
@Override
public int hashCode() {
return value;
}
/** Compares two ShortWritable. */
@Override
public int compareTo(ShortWritable o) {
short thisValue = this.value;
short thatValue = (o).value;
return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
}
/** Returns the short value in string format. */
@Override
public String toString() {
return Short.toString(value);
}
/** A Comparator optimized for ShortWritable. */
public static class Comparator extends WritableComparator {
public Comparator() {
super(ShortWritable.class);
}
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
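      // readUnsignedShort() returns an int in 0..65535; casting back to short
      // restores the original signed value, so this byte-level compare orders
      // values exactly like compareTo() on deserialized instances.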
short thisValue = (short) readUnsignedShort(b1, s1);
short thatValue = (short) readUnsignedShort(b2, s2);
return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
}
}
static { // register this comparator
WritableComparator.define(ShortWritable.class, new Comparator());
}
}
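
A minimal usage sketch of the new class (the harness class name is illustrative; only the ShortWritable API above is assumed):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.io.ShortWritable;

public class ShortWritableRoundTrip {
  public static void main(String[] args) throws Exception {
    ShortWritable original = new ShortWritable((short) 256);

    // write() emits exactly the two bytes of DataOutput.writeShort().
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    original.write(new DataOutputStream(bytes));

    // readFields() restores the value from those two bytes.
    ShortWritable restored = new ShortWritable();
    restored.readFields(new DataInputStream(
        new ByteArrayInputStream(bytes.toByteArray())));

    System.out.println(original.compareTo(restored) == 0);  // prints true
  }
}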

View File

@ -28,6 +28,7 @@
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configured;
@ -93,7 +94,7 @@ public void close() throws IOException {
@Override
public void open(OutputStream out) throws IOException {
outStream = out;
-    encoder = new BinaryEncoder(out);
+    encoder = EncoderFactory.get().binaryEncoder(out, encoder);
}
@Override
@ -127,7 +128,7 @@ public T deserialize(T t) throws IOException {
@Override
public void open(InputStream in) throws IOException {
inStream = in;
-    decoder = DecoderFactory.defaultFactory().createBinaryDecoder(in, null);
+    decoder = DecoderFactory.get().binaryDecoder(in, decoder);
}
}
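
The encoder and decoder lines replaced above track Avro's move from public encoder/decoder constructors to factories. Passing the previous instance back as the second argument lets Avro reuse internal buffers across open() calls; a minimal sketch of the pattern (the holder class name is illustrative):

import java.io.InputStream;
import java.io.OutputStream;

import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

class AvroCodecHolder {
  private BinaryEncoder encoder;  // null on first use, then recycled
  private BinaryDecoder decoder;  // null on first use, then recycled

  void openOut(OutputStream out) {
    // The reuse argument lets the factory recycle the old encoder's buffer.
    encoder = EncoderFactory.get().binaryEncoder(out, encoder);
  }

  void openIn(InputStream in) {
    decoder = DecoderFactory.get().binaryDecoder(in, decoder);
  }
}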

View File

@ -34,9 +34,9 @@
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.Transceiver;
-import org.apache.avro.reflect.ReflectRequestor;
-import org.apache.avro.reflect.ReflectResponder;
-import org.apache.avro.specific.SpecificRequestor;
+import org.apache.avro.ipc.reflect.ReflectRequestor;
+import org.apache.avro.ipc.reflect.ReflectResponder;
+import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -22,8 +22,8 @@
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.Transceiver;
-import org.apache.avro.specific.SpecificRequestor;
-import org.apache.avro.specific.SpecificResponder;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
import org.apache.hadoop.classification.InterfaceStability;
/**

View File

@ -18,15 +18,16 @@
package org.apache.hadoop.io;
import java.io.IOException;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.lang.reflect.Type;
import org.apache.avro.Schema;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import static junit.framework.TestCase.assertEquals;
@ -47,11 +48,11 @@ public static void testReflect(Object value, Type type, String schema)
// check that value is serialized correctly
ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
ByteArrayOutputStream out = new ByteArrayOutputStream();
-    writer.write(value, new BinaryEncoder(out));
+    writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
Object after =
-      reader.read(null, DecoderFactory.defaultFactory().createBinaryDecoder(
-          out.toByteArray(), null));
+      reader.read(null,
+          DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
assertEquals(value, after);
}
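
One subtlety in the replaced line above: the factory's binaryEncoder() is buffered and must be flushed before the underlying stream is read, while directBinaryEncoder() writes through immediately, which is why the test can call out.toByteArray() right after write(). A small sketch of the difference (demo class name is illustrative):

import java.io.ByteArrayOutputStream;

import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

public class EncoderFlushDemo {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();

    // Direct encoder: bytes reach the stream as they are written.
    BinaryEncoder direct = EncoderFactory.get().directBinaryEncoder(out, null);
    direct.writeInt(42);
    System.out.println(out.size());   // already > 0

    // Buffered encoder: nothing is visible until flush().
    out.reset();
    BinaryEncoder buffered = EncoderFactory.get().binaryEncoder(out, null);
    buffered.writeInt(42);
    System.out.println(out.size());   // 0, still buffered
    buffered.flush();
    System.out.println(out.size());   // > 0 after the flush
  }
}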

View File

@ -18,10 +18,11 @@
package org.apache.hadoop.io;
-import java.io.*;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
@ -68,6 +69,10 @@ public void testSimpleWritable() throws Exception {
public void testByteWritable() throws Exception {
testWritable(new ByteWritable((byte)128));
}
public void testShortWritable() throws Exception {
testWritable(new ShortWritable((short)256));
}
public void testDoubleWritable() throws Exception {
testWritable(new DoubleWritable(1.0));
@ -104,13 +109,13 @@ private static class FrobComparator extends WritableComparator {
}
}
-  private static class Frob implements WritableComparable {
+  private static class Frob implements WritableComparable<Frob> {
static { // register default comparator
WritableComparator.define(Frob.class, new FrobComparator());
}
@Override public void write(DataOutput out) throws IOException {}
@Override public void readFields(DataInput in) throws IOException {}
-    @Override public int compareTo(Object o) { return 0; }
+    @Override public int compareTo(Frob o) { return 0; }
}
/** Test that comparator is defined. */
@ -118,5 +123,31 @@ public static void testGetComparator() throws Exception {
assert(WritableComparator.get(Frob.class) instanceof FrobComparator);
}
/**
 * Test ShortWritable ordering, both directly via compareTo and through the
 * registered byte-level Comparator.
*/
public void testShortWritableComparator() throws Exception {
ShortWritable writable1 = new ShortWritable((short)256);
ShortWritable writable2 = new ShortWritable((short) 128);
ShortWritable writable3 = new ShortWritable((short) 256);
final String SHOULD_NOT_MATCH_WITH_RESULT_ONE = "Result should be 1, should not match the writables";
assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_ONE,
writable1.compareTo(writable2) == 1);
assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_ONE, WritableComparator.get(
ShortWritable.class).compare(writable1, writable2) == 1);
final String SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE = "Result should be -1, should not match the writables";
assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE, writable2
.compareTo(writable1) == -1);
assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE, WritableComparator.get(
ShortWritable.class).compare(writable2, writable1) == -1);
final String SHOULD_MATCH = "Result should be 0, should match the writables";
assertTrue(SHOULD_MATCH, writable1.compareTo(writable1) == 0);
assertTrue(SHOULD_MATCH, WritableComparator.get(ShortWritable.class)
.compare(writable1, writable3) == 0);
}
}
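
The reason ShortWritable registers a raw Comparator (see the static block in ShortWritable) is that frameworks can then order serialized records without deserializing them. A minimal sketch of that byte-level path (demo class name is illustrative):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.io.ShortWritable;
import org.apache.hadoop.io.WritableComparator;

public class RawCompareDemo {
  private static byte[] serialize(short v) throws Exception {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    new ShortWritable(v).write(new DataOutputStream(bytes));
    return bytes.toByteArray();
  }

  public static void main(String[] args) throws Exception {
    byte[] a = serialize((short) 128);
    byte[] b = serialize((short) 256);

    // Compares the raw bytes directly; no ShortWritable is instantiated.
    WritableComparator cmp = WritableComparator.get(ShortWritable.class);
    System.out.println(cmp.compare(a, 0, a.length, b, 0, b.length));  // -1
  }
}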

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.ipc;
-import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.AvroRemoteException;
@SuppressWarnings("serial")
public interface AvroTestProtocol {

View File

@ -28,7 +28,7 @@
import junit.framework.Assert;
import junit.framework.TestCase;
-import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.AvroRemoteException;
import org.apache.avro.util.Utf8;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -189,7 +189,7 @@ private void testSpecific(boolean secure) throws Exception {
(AvroSpecificTestProtocol)RPC.getProxy(AvroSpecificTestProtocol.class,
0, addr, conf);
-    Utf8 echo = proxy.echo(new Utf8("hello world"));
+    CharSequence echo = proxy.echo("hello world");
assertEquals("hello world", echo.toString());
int intResult = proxy.add(1, 2);
@ -210,7 +210,7 @@ public int add(int arg1, int arg2) throws AvroRemoteException {
}
@Override
-  public Utf8 echo(Utf8 msg) throws AvroRemoteException {
+  public CharSequence echo(CharSequence msg) throws AvroRemoteException {
return msg;
}
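
The Utf8-to-CharSequence change follows Avro 1.5's generated interfaces. It matters because the concrete runtime type is typically org.apache.avro.util.Utf8, which does not equals() a java.lang.String with the same characters; hence the echo.toString() comparison in the test above. A one-class illustration:

import org.apache.avro.util.Utf8;

public class Utf8EqualityDemo {
  public static void main(String[] args) {
    // Utf8 implements CharSequence and is what Avro usually hands back.
    CharSequence echoed = new Utf8("hello world");

    System.out.println(echoed.equals("hello world"));             // false
    System.out.println(echoed.toString().equals("hello world"));  // true
  }
}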

View File

@ -665,6 +665,14 @@ Trunk (unreleased changes)
HDFS-2233. Add WebUI tests with URI reserved chars. (eli)
HDFS-2265. Remove unnecessary BlockTokenSecretManager fields/methods from
BlockManager. (szetszwo)
HDFS-2260. Refactor BlockReader into an interface and implementation.
(todd)
HDFS-2096. Mavenization of hadoop-hdfs (Alejandro Abdelnur via tomwhite)
OPTIMIZATIONS
HDFS-1458. Improve checkpoint performance by avoiding unnecessary image
@ -969,6 +977,9 @@ Trunk (unreleased changes)
HDFS-73. DFSOutputStream does not close all the sockets.
(Uma Maheswara Rao G via eli)
HDFS-1257. Fix a race condition on BlockManager.recentInvalidateSets.
(Eric Payne via szetszwo)
BREAKDOWN OF HDFS-1073 SUBTASKS
HDFS-1521. Persist transaction ID on disk between NN restarts.

View File

@ -35,7 +35,7 @@
<!-- Checks that a package.html file exists for each package. -->
<!-- See http://checkstyle.sf.net/config_javadoc.html#PackageHtml -->
-    <module name="PackageHtml"/>
+    <module name="JavadocPackage"/>
<!-- Checks whether files end with a new line. -->
<!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
@ -45,6 +45,8 @@
<!-- See http://checkstyle.sf.net/config_misc.html#Translation -->
<module name="Translation"/>
+<module name="FileLength"/>
+<module name="FileTabCharacter"/>
<module name="TreeWalker">
@ -96,7 +98,6 @@
<!-- Checks for Size Violations. -->
<!-- See http://checkstyle.sf.net/config_sizes.html -->
-    <module name="FileLength"/>
<module name="LineLength"/>
<module name="MethodLength"/>
<module name="ParameterNumber"/>
@ -110,7 +111,6 @@
<module name="NoWhitespaceBefore"/>
<module name="ParenPad"/>
<module name="TypecastParenPad"/>
-    <module name="TabCharacter"/>
<module name="WhitespaceAfter">
<property name="tokens" value="COMMA, SEMI"/>
</module>

View File

@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# The number of acceptable warnings for this module
# Please update the root test-patch.properties if you update this file.
OK_RELEASEAUDIT_WARNINGS=0
OK_FINDBUGS_WARNINGS=0
OK_JAVADOC_WARNINGS=0

hadoop-hdfs/pom.xml (new file, 411 lines)

@ -0,0 +1,411 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project-distro</artifactId>
<version>0.23.0-SNAPSHOT</version>
<relativePath>../hadoop-project-distro</relativePath>
</parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>0.23.0-SNAPSHOT</version>
<description>Apache Hadoop HDFS</description>
<name>Apache Hadoop HDFS</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>hdfs</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjtools</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-daemon</groupId>
<artifactId>commons-daemon</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.jcraft</groupId>
<artifactId>jsch</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo.jspc</groupId>
<artifactId>jspc-maven-plugin</artifactId>
<executions>
<execution>
<id>hdfs</id>
<phase>generate-sources</phase>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<compile>false</compile>
<workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
<webFragmentFile>${project.build.directory}/hdfs-jsp-servlet-definitions.xml</webFragmentFile>
<packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
<sources>
<directory>${basedir}/src/main/webapps/hdfs</directory>
<includes>
<include>*.jsp</include>
</includes>
</sources>
</configuration>
</execution>
<execution>
<id>secondary</id>
<phase>generate-sources</phase>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<compile>false</compile>
<workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
<webFragmentFile>${project.build.directory}/secondary-jsp-servlet-definitions.xml</webFragmentFile>
<packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
<sources>
<directory>${basedir}/src/main/webapps/secondary</directory>
<includes>
<include>*.jsp</include>
</includes>
</sources>
</configuration>
</execution>
<execution>
<id>datanode</id>
<phase>generate-sources</phase>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<compile>false</compile>
<workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
<webFragmentFile>${project.build.directory}/datanode-jsp-servlet-definitions.xml</webFragmentFile>
<packageName>org.apache.hadoop.hdfs.server.datanode</packageName>
<sources>
<directory>${basedir}/src/main/webapps/datanode</directory>
<includes>
<include>*.jsp</include>
</includes>
</sources>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>org.codehaus.mojo.jspc</groupId>
<artifactId>jspc-compiler-tomcat5</artifactId>
<version>2.0-alpha-3</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.4.1</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl104-over-slf4j</artifactId>
<version>1.4.1</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
<execution>
<id>add-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>${project.build.directory}/generated-src/main/jsp</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>create-web-xmls</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<loadfile property="hdfs.servlet.definitions" srcFile="${project.build.directory}/hdfs-jsp-servlet-definitions.xml"/>
<loadfile property="secondary.servlet.definitions" srcFile="${project.build.directory}/secondary-jsp-servlet-definitions.xml"/>
<loadfile property="datanode.servlet.definitions" srcFile="${project.build.directory}/datanode-jsp-servlet-definitions.xml"/>
<echoproperties destfile="${project.build.directory}/webxml.properties">
<propertyset>
<propertyref regex=".*.servlet.definitions"/>
</propertyset>
</echoproperties>
<filter filtersfile="${project.build.directory}/webxml.properties"/>
<copy file="${basedir}/src/main/webapps/proto-hdfs-web.xml"
tofile="${project.build.directory}/webapps/hdfs/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-secondary-web.xml"
tofile="${project.build.directory}/webapps/secondary/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-datanode-web.xml"
tofile="${project.build.directory}/webapps/datanode/WEB-INF/web.xml"
filtering="true"/>
<copy toDir="${project.build.directory}/webapps">
<fileset dir="${basedir}/src/main/webapps">
<exclude name="**/*.jsp"/>
<exclude name="**/proto-*-web.xml"/>
</fileset>
</copy>
</target>
</configuration>
</execution>
<execution>
<id>create-log-dir</id>
<phase>process-test-resources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<delete dir="${test.build.data}"/>
<mkdir dir="${hadoop.log.dir}"/>
<copy todir="${project.build.directory}/test-classes/webapps">
<fileset dir="${project.build.directory}/webapps">
<exclude name="proto-*-web.xml"/>
</fileset>
</copy>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
<exclude>CHANGES.txt</exclude>
<exclude>CHANGES.HDFS-1623.txt</exclude>
<exclude>.idea/**</exclude>
<exclude>src/main/conf/*</exclude>
<exclude>src/main/docs/**</exclude>
<exclude>dev-support/findbugsExcludeFile.xml</exclude>
<exclude>dev-support/checkstyle*</exclude>
<exclude>dev-support/jdiff/**</exclude>
<exclude>dev-support/*tests</exclude>
<exclude>src/main/native/*</exclude>
<exclude>src/main/native/config/*</exclude>
<exclude>src/main/native/m4/*</exclude>
<exclude>src/test/empty-file</exclude>
<exclude>src/test/all-tests</exclude>
<exclude>src/test/resources/*.tgz</exclude>
<exclude>src/test/resources/data*</exclude>
<exclude>src/test/resources/editStored*</exclude>
<exclude>src/test/resources/empty-file</exclude>
<exclude>src/main/webapps/datanode/robots.txt</exclude>
<exclude>src/main/docs/releasenotes.html</exclude>
<exclude>src/contrib/**</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>native</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<copy toDir="${project.build.directory}/native">
<fileset dir="${basedir}/src/main/native"/>
</copy>
<mkdir dir="${project.build.directory}/native/m4"/>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>make-maven-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<phase>compile</phase>
<goals>
<goal>autoreconf</goal>
<goal>configure</goal>
<goal>make-install</goal>
</goals>
<configuration>
<!-- autoreconf settings -->
<workDir>${project.build.directory}/native</workDir>
<arguments>
<argument>-i</argument>
<argument>-f</argument>
</arguments>
<!-- configure settings -->
<configureEnvironment>
<property>
<name>ac_cv_func_malloc_0_nonnull</name>
<value>yes</value>
</property>
<property>
<name>JVM_ARCH</name>
<value>${sun.arch.data.model}</value>
</property>
</configureEnvironment>
<configureOptions>
</configureOptions>
<configureWorkDir>${project.build.directory}/native</configureWorkDir>
<prefix>/usr/local</prefix>
<!-- make settings -->
<installEnvironment>
<property>
<name>ac_cv_func_malloc_0_nonnull</name>
<value>yes</value>
</property>
<property>
<name>JVM_ARCH</name>
<value>${sun.arch.data.model}</value>
</property>
</installEnvironment>
<!-- configure & make settings -->
<destDir>${project.build.directory}/native/target</destDir>
</configuration>
</execution>
<!-- TODO wire here native testcases
<execution>
<id>test</id>
<phase>test</phase>
<goals>
<goal>test</goal>
</goals>
<configuration>
<destDir>${project.build.directory}/native/target</destDir>
</configuration>
</execution>
-->
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@ -1,3 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# syntax: [prefix].[source|sink].[instance].[options]
# See javadoc of package-info.java for org.apache.hadoop.metrics2 for details
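#
# A sink wiring example in that syntax (the instance name 'file' is
# illustrative; FileSink ships with hadoop-common):
# *.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
# namenode.sink.file.filename=namenode-metrics.out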

View File

@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<!-- Put site-specific property overrides in this file. -->
<configuration>
</configuration>

Some files were not shown because too many files have changed in this diff.