hadoop/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project-dist</artifactId>
<version>3.3.0-SNAPSHOT</version>
<relativePath>../../hadoop-project-dist</relativePath>
</parent>
<artifactId>hadoop-hdfs-native-client</artifactId>
<version>3.3.0-SNAPSHOT</version>
<description>Apache Hadoop HDFS Native Client</description>
<name>Apache Hadoop HDFS Native Client</name>
<packaging>jar</packaging>
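  <!-- Knobs for the native build: whether fuse-dfs, libwebhdfs and valgrind are
       required, plus extra arguments forwarded to cmake, ctest and the native
       build tool. -->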
  <properties>
    <require.fuse>false</require.fuse>
    <require.libwebhdfs>false</require.libwebhdfs>
    <require.valgrind>false</require.valgrind>
    <native_ctest_args></native_ctest_args>
    <native_cmake_args></native_cmake_args>
    <native_make_args></native_make_args>
    <hadoop.component>hdfs</hadoop.component>
  </properties>
  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-client</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-core</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>
  <build>
    <plugins>
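      <!-- Skip Apache RAT license-header checks for generated and third-party native sources. -->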
      <plugin>
        <groupId>org.apache.rat</groupId>
        <artifactId>apache-rat-plugin</artifactId>
        <configuration>
          <excludes>
            <exclude>src/main/native/*</exclude>
            <exclude>src/main/native/config/*</exclude>
            <exclude>src/main/native/m4/*</exclude>
            <exclude>src/main/native/fuse-dfs/util/tree.h</exclude>
            <exclude>src/main/native/libhdfspp/third_party/**</exclude>
            <exclude>src/contrib/**</exclude>
          </excludes>
        </configuration>
      </plugin>
    </plugins>
  </build>
  <profiles>
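    <!-- Windows native build: cmake generates Visual Studio projects that are then
         built with msbuild. Only supported (and automatically activated) on Windows. -->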
    <profile>
      <id>native-win</id>
      <activation>
        <activeByDefault>false</activeByDefault>
        <os>
          <family>windows</family>
        </os>
      </activation>
      <properties>
        <runningWithNative>true</runningWithNative>
      </properties>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-enforcer-plugin</artifactId>
            <executions>
              <execution>
                <id>enforce-os</id>
                <goals>
                  <goal>enforce</goal>
                </goals>
                <configuration>
                  <rules>
                    <requireOS>
                      <family>windows</family>
                      <message>native-win build only supported on Windows</message>
                    </requireOS>
                  </rules>
                  <fail>true</fail>
                </configuration>
              </execution>
            </executions>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
              <execution>
                <id>make</id>
                <phase>compile</phase>
                <goals>
                  <goal>run</goal>
                </goals>
                <configuration>
                  <target>
                    <mkdir dir="${project.build.directory}/native"/>
                    <exec executable="cmake" dir="${project.build.directory}/native"
                          failonerror="true">
                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DHADOOP_BUILD=1 -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} -A '${env.PLATFORM}'"/>
                      <arg line="${native_cmake_args}"/>
                    </exec>
                    <exec executable="msbuild" dir="${project.build.directory}/native"
                          failonerror="true">
                      <arg line="ALL_BUILD.vcxproj /nologo /p:Configuration=RelWithDebInfo /p:LinkIncremental=false"/>
                      <arg line="${native_make_args}"/>
                    </exec>
                    <!-- Copy for inclusion in distribution. -->
                    <copy todir="${project.build.directory}/bin">
                      <fileset dir="${project.build.directory}/native/bin/RelWithDebInfo"/>
                    </copy>
                  </target>
                </configuration>
              </execution>
              <execution>
                <id>native_tests</id>
                <phase>test</phase>
                <goals><goal>run</goal></goals>
                <configuration>
                  <skip>${skipTests}</skip>
                  <target>
                    <property name="compile_classpath" refid="maven.compile.classpath"/>
                    <property name="test_classpath" refid="maven.test.classpath"/>
                    <exec executable="ctest" failonerror="true" dir="${project.build.directory}/native">
                      <arg line="--output-on-failure"/>
                      <arg line="${native_ctest_args}"/>
                      <env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
                      <!-- HADOOP_HOME required to find winutils. -->
                      <env key="HADOOP_HOME" value="${hadoop.common.build.dir}"/>
                      <!-- Make sure hadoop.dll and jvm.dll are on PATH. -->
                      <env key="PATH" value="${env.PATH};${hadoop.common.build.dir}/bin;${java.home}/jre/bin/server;${java.home}/bin/server"/>
                      <!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
                      <env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
                    </exec>
                  </target>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
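    <!-- Default Unix native build, driven by the cmake-compile goal of hadoop-maven-plugins. -->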
    <profile>
      <id>native</id>
      <activation>
        <activeByDefault>false</activeByDefault>
      </activation>
      <properties>
        <runningWithNative>true</runningWithNative>
      </properties>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-maven-plugins</artifactId>
            <executions>
              <execution>
                <id>cmake-compile</id>
                <phase>compile</phase>
                <goals><goal>cmake-compile</goal></goals>
                <configuration>
                  <source>${basedir}/src</source>
                  <vars>
                    <GENERATED_JAVAH>${project.build.directory}/native/javah</GENERATED_JAVAH>
                    <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
                    <REQUIRE_FUSE>${require.fuse}</REQUIRE_FUSE>
                    <REQUIRE_VALGRIND>${require.valgrind}</REQUIRE_VALGRIND>
                    <HADOOP_BUILD>1</HADOOP_BUILD>
                    <REQUIRE_LIBWEBHDFS>${require.libwebhdfs}</REQUIRE_LIBWEBHDFS>
                    <REQUIRE_OPENSSL>${require.openssl}</REQUIRE_OPENSSL>
                    <CUSTOM_OPENSSL_PREFIX>${openssl.prefix}</CUSTOM_OPENSSL_PREFIX>
                    <CUSTOM_OPENSSL_LIB>${openssl.lib}</CUSTOM_OPENSSL_LIB>
                    <CUSTOM_OPENSSL_INCLUDE>${openssl.include}</CUSTOM_OPENSSL_INCLUDE>
                  </vars>
                  <output>${project.build.directory}</output>
                </configuration>
              </execution>
            </executions>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
              <execution>
                <id>native_tests</id>
                <phase>test</phase>
                <goals><goal>run</goal></goals>
                <configuration>
                  <skip>${skipTests}</skip>
                  <target>
                    <property name="compile_classpath" refid="maven.compile.classpath"/>
                    <property name="test_classpath" refid="maven.test.classpath"/>
                    <exec executable="ctest" failonerror="true" dir="${project.build.directory}/">
                      <arg line="--output-on-failure"/>
                      <arg line="${native_ctest_args}"/>
<env key="LIBHDFS_OPTS" value="${env.LIBHDFS_OPTS} -Xcheck:jni"/>
<env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
<!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
<env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
<env key="DYLD_LIBRARY_PATH" value="${env.DYLD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
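    <!-- Same native build as the "native" profile but forcing clang/clang++ as the
         compilers; cmake output and ctest runs use a separate clang/ subdirectory. -->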
    <profile>
      <id>native-clang</id>
      <activation>
        <activeByDefault>false</activeByDefault>
      </activation>
      <properties>
        <runningWithNative>true</runningWithNative>
      </properties>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-maven-plugins</artifactId>
            <executions>
              <execution>
                <id>cmake-compile-clang</id>
                <phase>compile</phase>
                <goals><goal>cmake-compile</goal></goals>
                <configuration>
                  <source>${basedir}/src</source>
                  <vars>
                    <CMAKE_C_COMPILER>clang</CMAKE_C_COMPILER>
                    <CMAKE_CXX_COMPILER>clang++</CMAKE_CXX_COMPILER>
                    <GENERATED_JAVAH>${project.build.directory}/native/javah</GENERATED_JAVAH>
                    <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
                    <REQUIRE_FUSE>${require.fuse}</REQUIRE_FUSE>
                    <REQUIRE_VALGRIND>${require.valgrind}</REQUIRE_VALGRIND>
                    <HADOOP_BUILD>1</HADOOP_BUILD>
                    <REQUIRE_LIBWEBHDFS>${require.libwebhdfs}</REQUIRE_LIBWEBHDFS>
                    <REQUIRE_OPENSSL>${require.openssl}</REQUIRE_OPENSSL>
                    <CUSTOM_OPENSSL_PREFIX>${openssl.prefix}</CUSTOM_OPENSSL_PREFIX>
                    <CUSTOM_OPENSSL_LIB>${openssl.lib}</CUSTOM_OPENSSL_LIB>
                    <CUSTOM_OPENSSL_INCLUDE>${openssl.include}</CUSTOM_OPENSSL_INCLUDE>
                  </vars>
                  <output>${project.build.directory}/clang</output>
                </configuration>
              </execution>
            </executions>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
              <execution>
                <id>native_tests_clang</id>
                <phase>test</phase>
                <goals><goal>run</goal></goals>
                <configuration>
                  <skip>${skipTests}</skip>
                  <target>
                    <property name="compile_classpath" refid="maven.compile.classpath"/>
                    <property name="test_classpath" refid="maven.test.classpath"/>
                    <exec executable="ctest" failonerror="true" dir="${project.build.directory}/clang">
                      <arg line="--output-on-failure"/>
                      <arg line="${native_ctest_args}"/>
                      <env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
                      <!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
                      <env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/clang/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
                      <env key="DYLD_LIBRARY_PATH" value="${env.DYLD_LIBRARY_PATH}:${project.build.directory}/clang/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
                    </exec>
                  </target>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>
</project>