hadoop/hadoop-hdfs-project/hadoop-hdfs/pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project-dist</artifactId>
<version>3.4.0-SNAPSHOT</version>
<relativePath>../../hadoop-project-dist</relativePath>
</parent>
<artifactId>hadoop-hdfs</artifactId>
<version>3.4.0-SNAPSHOT</version>
<description>Apache Hadoop HDFS</description>
<name>Apache Hadoop HDFS</name>
<packaging>jar</packaging>
<properties>
<hadoop.component>hdfs</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties>
<dependencies>
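<!-- Hadoop sibling modules (hadoop-auth, hadoop-common, hadoop-hdfs-client) are "provided":
they are expected on the runtime classpath and are not bundled into this jar. -->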
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop.thirdparty</groupId>
<artifactId>hadoop-shaded-guava</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util-ajax</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-daemon</groupId>
<artifactId>commons-daemon</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>findbugs</artifactId>
<version>3.0.1</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>xml-apis</groupId>
<artifactId>xml-apis</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-kms</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-kms</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>${leveldbjni.group}</groupId>
<artifactId>leveldbjni-all</artifactId>
</dependency>
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.lz4</groupId>
<artifactId>lz4-java</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
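<!-- Surefire passes the native-build flag through to the tests and registers
TimedOutTestsListener, the JUnit listener used for timed-out test diagnostics. -->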
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<runningWithNative>${runningWithNative}</runningWithNative>
</systemPropertyVariables>
<properties>
<property>
<name>listener</name>
<value>org.apache.hadoop.test.TimedOutTestsListener</value>
</property>
</properties>
</configuration>
</plugin>
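<!-- maven-antrun-plugin stamps proto-web.xml into WEB-INF/web.xml for each web
application (hdfs, secondary, datanode, journal, nfs3), copies the remaining webapp
content, prepares the test data and log directories, and copies hdfs-default.xml and
configuration.xsl into the site resources before site generation. -->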
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<configuration>
<skipTests>false</skipTests>
</configuration>
<executions>
<execution>
<id>create-web-xmls</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/hdfs/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/secondary/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/datanode/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/journal/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/nfs3/WEB-INF/web.xml"
filtering="true"/>
<copy toDir="${project.build.directory}/webapps">
<fileset dir="${basedir}/src/main/webapps">
<exclude name="**/proto-web.xml"/>
</fileset>
</copy>
<replace dir="${project.build.directory}/webapps" value="${release-year}">
<include name="**/*.html"/>
<replacetoken>{release-year-token}</replacetoken>
</replace>
</target>
</configuration>
</execution>
<execution>
<id>create-log-dir</id>
<phase>process-test-resources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<delete dir="${test.build.data}"/>
<mkdir dir="${test.build.data}"/>
<mkdir dir="${hadoop.log.dir}"/>
<copy todir="${project.build.directory}/test-classes/webapps">
<fileset dir="${project.build.directory}/webapps">
<exclude name="proto-*-web.xml"/>
<exclude name="**/proto-web.xml"/>
</fileset>
</copy>
</target>
</configuration>
</execution>
<execution>
<phase>pre-site</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<copy file="src/main/resources/hdfs-default.xml" todir="src/site/resources"/>
<copy file="src/main/xsl/configuration.xsl" todir="src/site/resources"/>
</target>
</configuration>
</execution>
</executions>
</plugin>
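<!-- protobuf-maven-plugin compiles the HDFS .proto definitions; the additional proto
path elements let them import messages from hadoop-common and hadoop-hdfs-client. -->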
<plugin>
<groupId>org.xolstice.maven.plugins</groupId>
<artifactId>protobuf-maven-plugin</artifactId>
<executions>
<execution>
<id>src-compile-protoc</id>
<configuration>
<skip>false</skip>
<additionalProtoPathElements>
<additionalProtoPathElement>
${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
</additionalProtoPathElement>
<additionalProtoPathElement>
${basedir}/../hadoop-hdfs-client/src/main/proto
</additionalProtoPathElement>
</additionalProtoPathElements>
</configuration>
</execution>
</executions>
</plugin>
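<!-- The replacer executions only flip the skip flag and add an exclude here; the
actual replacement rules are presumably inherited from the parent POM. -->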
<plugin>
<groupId>com.google.code.maven-replacer-plugin</groupId>
<artifactId>replacer</artifactId>
<executions>
<execution>
<id>replace-generated-sources</id>
<configuration>
<skip>false</skip>
</configuration>
</execution>
<execution>
<id>replace-sources</id>
<configuration>
<skip>false</skip>
<excludes>
<exclude>**/DFSUtil.java</exclude>
</excludes>
</configuration>
</execution>
<execution>
<id>replace-test-sources</id>
<configuration>
<skip>false</skip>
</configuration>
</execution>
</executions>
</plugin>
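<!-- resource-gz pre-compresses the static web UI assets (js, css) under
target/webapps/static so they can be served gzipped. -->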
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>resource-gz</id>
<phase>generate-resources</phase>
<goals>
<goal>resource-gz</goal>
</goals>
<configuration>
<inputDirectory>${basedir}/src/main/webapps/static</inputDirectory>
<outputDirectory>${basedir}/target/webapps/static</outputDirectory>
<extensions>js,css</extensions>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<excludePackageNames>org.apache.hadoop.hdfs.protocol.proto</excludePackageNames>
</configuration>
</plugin>
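<!-- apache-rat-plugin license check; bundled third-party web assets and binary test
fixtures are excluded from the audit. -->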
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
<exclude>.gitattributes</exclude>
<exclude>.idea/**</exclude>
<exclude>src/main/conf/*</exclude>
<exclude>dev-support/findbugsExcludeFile.xml</exclude>
<exclude>dev-support/checkstyle*</exclude>
<exclude>dev-support/jdiff/**</exclude>
<exclude>dev-support/*tests</exclude>
<exclude>src/test/empty-file</exclude>
<exclude>src/test/all-tests</exclude>
<exclude>src/test/resources/*.tgz</exclude>
<exclude>src/test/resources/data*</exclude>
<exclude>**/*.json</exclude>
<exclude>src/test/resources/editsStored*</exclude>
<exclude>src/test/resources/empty-file</exclude>
<exclude>src/main/webapps/datanode/robots.txt</exclude>
<exclude>src/main/webapps/hdfs/robots.txt</exclude>
<exclude>src/main/webapps/journal/robots.txt</exclude>
<exclude>src/main/webapps/secondary/robots.txt</exclude>
<exclude>src/contrib/**</exclude>
<exclude>src/site/resources/images/*</exclude>
<exclude>src/main/webapps/static/bootstrap-3.4.1/**</exclude>
<exclude>src/main/webapps/static/moment.min.js</exclude>
<exclude>src/main/webapps/static/dust-full-2.0.0.min.js</exclude>
<exclude>src/main/webapps/static/dust-helpers-1.1.1.min.js</exclude>
<exclude>src/main/webapps/static/jquery-3.5.1.min.js</exclude>
<exclude>src/main/webapps/static/jquery.dataTables.min.js</exclude>
<exclude>src/main/webapps/static/json-bignum.js</exclude>
<exclude>src/main/webapps/static/dataTables.bootstrap.css</exclude>
<exclude>src/main/webapps/static/dataTables.bootstrap.js</exclude>
<exclude>src/main/webapps/static/d3-v4.1.1.min.js</exclude>
<exclude>src/test/resources/diskBalancer/data-cluster-3node-3disk.json</exclude>
</excludes>
</configuration>
</plugin>
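<!-- maven-clean-plugin removes the two files that the pre-site antrun execution
copies into src/site/resources. -->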
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<configuration>
<filesets>
<fileset>
<directory>src/site/resources</directory>
<includes>
<include>configuration.xsl</include>
<include>hdfs-default.xml</include>
</includes>
<followSymlinks>false</followSymlinks>
</fileset>
</filesets>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>parallel-tests</id>
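<!-- Runs the unit tests in several forked JVMs, each with its own data, build, and
tmp directories. A typical invocation (assuming the convention used elsewhere in
Hadoop) is: mvn test -Pparallel-tests -DtestsThreadCount=8 -->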
<build>
<plugins>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>parallel-tests-createdir</id>
<goals>
<goal>parallel-tests-createdir</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<testFailureIgnore>${ignoreTestFailure}</testFailureIgnore>
<forkCount>${testsThreadCount}</forkCount>
<reuseForks>false</reuseForks>
<argLine>${maven-surefire-plugin.argLine} -DminiClusterDedicatedDirs=true</argLine>
<systemPropertyVariables>
<testsThreadCount>${testsThreadCount}</testsThreadCount>
<test.build.data>${test.build.data}/${surefire.forkNumber}</test.build.data>
<test.build.dir>${test.build.dir}/${surefire.forkNumber}</test.build.dir>
<hadoop.tmp.dir>${hadoop.tmp.dir}/${surefire.forkNumber}</hadoop.tmp.dir>
<!-- This is intentionally the same directory for all JUnit -->
<!-- forks, for use in the very rare situation that -->
<!-- concurrent tests need to coordinate, such as using lock -->
<!-- files. -->
<test.build.shared.data>${test.build.data}</test.build.shared.data>
<!-- Due to a Maven quirk, setting this to just -->
<!-- surefire.forkNumber won't do the parameter substitution. -->
<!-- Putting a prefix in front of it like "fork-" makes it -->
<!-- work. -->
<test.unique.fork.id>fork-${surefire.forkNumber}</test.unique.fork.id>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<!-- profile to test shell code -->
<profile>
<id>shelltest</id>
<activation>
<property>
<name>!skipTests</name>
</property>
</activation>
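<!-- Runs the bats-based shell tests in src/test/scripts whenever tests are not
skipped; this assumes bash (and the bats framework invoked by run-bats.sh) is
available on the build machine. -->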
<build>
<plugins>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>hdfs-test-bats-driver</id>
<phase>test</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<exec dir="src/test/scripts"
executable="bash"
failonerror="true">
<arg value="./run-bats.sh" />
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>java9</id>
<activation>
<jdk>[9,)</jdk>
</activation>
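<!-- On JDK 9 and later, TestDFSClientFailover is excluded from test compilation,
presumably because it relies on JDK-internal APIs that were removed. -->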
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<testExcludes>
<testExclude>org/apache/hadoop/hdfs/TestDFSClientFailover.java</testExclude>
</testExcludes>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>