hadoop/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project-dist</artifactId>
<version>3.2.0-SNAPSHOT</version>
<relativePath>../../hadoop-project-dist</relativePath>
</parent>
<artifactId>hadoop-hdfs-native-client</artifactId>
<version>3.2.0-SNAPSHOT</version>
<description>Apache Hadoop HDFS Native Client</description>
<name>Apache Hadoop HDFS Native Client</name>
<packaging>jar</packaging>
<properties>
<require.fuse>false</require.fuse>
<require.libwebhdfs>false</require.libwebhdfs>
<require.valgrind>false</require.valgrind>
<native_ctest_args></native_ctest_args>
<native_cmake_args></native_cmake_args>
<native_make_args></native_make_args>
<hadoop.component>hdfs</hadoop.component>
</properties>
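<!--
The require.* properties above make the build fail if the corresponding
optional native piece (fuse_dfs, libwebhdfs, valgrind-based checks) cannot be
built, while native_cmake_args, native_make_args and native_ctest_args pass
extra flags through to cmake, make/msbuild and ctest respectively. An
illustrative invocation (adjust flags to your environment):
  mvn package -Pnative -Drequire.fuse=true -DskipTests
-->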
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
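<!-- Keep Apache RAT license-header checks away from bundled third-party
and build-system sources excluded below. -->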
<configuration>
<excludes>
<exclude>src/main/native/*</exclude>
<exclude>src/main/native/config/*</exclude>
<exclude>src/main/native/m4/*</exclude>
<exclude>src/main/native/fuse-dfs/util/tree.h</exclude>
<exclude>src/main/native/libhdfspp/third_party/**</exclude>
<exclude>src/contrib/**</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>native-win</id>
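<!-- Windows native build: activated automatically when building on a Windows
OS family (or explicitly with -Pnative-win). cmake generates Visual Studio
project files and msbuild compiles them; the target architecture is taken
from the PLATFORM environment variable (typically x64) and passed to cmake
via -A. -->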
<activation>
<activeByDefault>false</activeByDefault>
<os>
<family>windows</family>
</os>
</activation>
<properties>
<runningWithNative>true</runningWithNative>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<executions>
<execution>
<id>enforce-os</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<requireOS>
<family>windows</family>
<message>native-win build only supported on Windows</message>
</requireOS>
</rules>
<fail>true</fail>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>make</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}/native"/>
<exec executable="cmake" dir="${project.build.directory}/native"
failonerror="true">
<arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DHADOOP_BUILD=1 -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} -A '${env.PLATFORM}'"/>
<arg line="${native_cmake_args}"/>
</exec>
<exec executable="msbuild" dir="${project.build.directory}/native"
failonerror="true">
<arg line="ALL_BUILD.vcxproj /nologo /p:Configuration=RelWithDebInfo /p:LinkIncremental=false"/>
<arg line="${native_make_args}"/>
</exec>
<!-- Copy for inclusion in distribution. -->
<copy todir="${project.build.directory}/bin">
<fileset dir="${project.build.directory}/native/bin/RelWithDebInfo"/>
</copy>
</target>
</configuration>
</execution>
<execution>
<id>native_tests</id>
<phase>test</phase>
<goals><goal>run</goal></goals>
<configuration>
<skip>${skipTests}</skip>
<target>
<property name="compile_classpath" refid="maven.compile.classpath"/>
<property name="test_classpath" refid="maven.test.classpath"/>
<exec executable="ctest" failonerror="true" dir="${project.build.directory}/native">
<arg line="--output-on-failure"/>
<arg line="${native_ctest_args}"/>
<env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
<!-- HADOOP_HOME required to find winutils. -->
<env key="HADOOP_HOME" value="${hadoop.common.build.dir}"/>
<!-- Make sure hadoop.dll and jvm.dll are on PATH. -->
<env key="PATH" value="${env.PATH};${hadoop.common.build.dir}/bin;${java.home}/jre/bin/server;${java.home}/bin/server"/>
<!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
<env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>native</id>
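<!-- Unix native build: enable with -Pnative. Runs cmake and make against
src/ during the compile phase and executes the native test suite with ctest
during the test phase. -->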
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<properties>
<runningWithNative>true</runningWithNative>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>make</id>
<phase>compile</phase>
<goals><goal>run</goal></goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}"/>
<exec executable="cmake" dir="${project.build.directory}" failonerror="true">
<arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DHADOOP_BUILD=1 -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} "/>
<arg line="${native_cmake_args}"/>
</exec>
<exec executable="make" dir="${project.build.directory}" failonerror="true">
<arg line="${native_make_args}"/>
</exec>
</target>
</configuration>
</execution>
<execution>
<id>native_tests</id>
<phase>test</phase>
<goals><goal>run</goal></goals>
<configuration>
<skip>${skipTests}</skip>
<target>
<property name="compile_classpath" refid="maven.compile.classpath"/>
<property name="test_classpath" refid="maven.test.classpath"/>
<exec executable="ctest" failonerror="true" dir="${project.build.directory}/">
<arg line="--output-on-failure"/>
<arg line="${native_ctest_args}"/>
<env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
<!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
<env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>test-patch</id>
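<!-- Pre-commit (test-patch) profile: rebuilds the native code in
target/altern with the alternate compiler (clang when CC contains gcc,
otherwise gcc) to catch portability problems, runs the native tests against
that build, and then deletes the altern directory. -->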
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<properties>
<runningWithNative>true</runningWithNative>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>make_altern</id>
<phase>compile</phase>
<goals><goal>run</goal></goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}/altern"/>
<condition property="c_compiler" value="clang" else="gcc">
<contains string="${env.CC}" substring="gcc"/>
</condition>
<condition property="cxx_compiler" value="clang++" else="g++">
<contains string="${env.CXX}" substring="g++"/>
</condition>
<exec executable="cmake" dir="${project.build.directory}/altern" failonerror="true">
<arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/altern/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DHADOOP_BUILD=1 -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} "/>
<arg line="-DCMAKE_C_COMPILER=${c_compiler} -DCMAKE_CXX_COMPILER=${cxx_compiler}"/>
<arg line="${native_cmake_args}"/>
</exec>
<exec executable="make" dir="${project.build.directory}/altern" failonerror="true">
<arg line="${native_make_args}"/>
</exec>
</target>
</configuration>
</execution>
<execution>
<id>native_tests_altern</id>
<phase>test</phase>
<goals><goal>run</goal></goals>
<configuration>
<skip>${skipTests}</skip>
<target>
<property name="compile_classpath" refid="maven.compile.classpath"/>
<property name="test_classpath" refid="maven.test.classpath"/>
<exec executable="ctest" failonerror="true" dir="${project.build.directory}/altern">
<arg line="--output-on-failure"/>
<arg line="${native_ctest_args}"/>
<env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
<!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
<env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/altern/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
</exec>
</target>
</configuration>
</execution>
<execution>
<id>clean_altern</id>
<phase>test</phase>
<goals><goal>run</goal></goals>
<configuration>
<target>
<delete dir="${project.build.directory}/altern" includeemptydirs="true"/>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>