Merge trunk into HA branch after mavenization of hadoop-common
(no conflicts, straight SVN merge) git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1153931 13f79535-47bb-0310-9956-ffa450edef68
7
.gitignore
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
*.iml
|
||||
*.ipr
|
||||
*.iws
|
||||
.idea
|
||||
.svn
|
||||
.classpath
|
||||
target
|
1927
common/build.xml
@ -1,24 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
|
||||
<xsl:output method="html"/>
|
||||
<xsl:template match="configuration">
|
||||
<html>
|
||||
<body>
|
||||
<table border="1">
|
||||
<tr>
|
||||
<td>name</td>
|
||||
<td>value</td>
|
||||
<td>description</td>
|
||||
</tr>
|
||||
<xsl:for-each select="property">
|
||||
<tr>
|
||||
<td><a name="{name}"><xsl:value-of select="name"/></a></td>
|
||||
<td><xsl:value-of select="value"/></td>
|
||||
<td><xsl:value-of select="description"/></td>
|
||||
</tr>
|
||||
</xsl:for-each>
|
||||
</table>
|
||||
</body>
|
||||
</html>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
@ -1,8 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
|
||||
|
||||
<!-- Put site-specific property overrides in this file. -->
|
||||
|
||||
<configuration>
|
||||
|
||||
</configuration>
|
@ -1,66 +0,0 @@
|
||||
# Set Hadoop-specific environment variables here.
|
||||
|
||||
# The only required environment variable is JAVA_HOME. All others are
|
||||
# optional. When running a distributed configuration it is best to
|
||||
# set JAVA_HOME in this file, so that it is correctly defined on
|
||||
# remote nodes.
|
||||
|
||||
# The java implementation to use. Required.
|
||||
export JAVA_HOME=${JAVA_HOME}
|
||||
|
||||
# Hadoop Installation Prefix
|
||||
HADOOP_PREFIX=${HADOOP_PREFIX}
|
||||
|
||||
# Hadoop Configuration Directory
|
||||
HADOOP_CONF_DIR=${HADOOP_CONF_DIR}
|
||||
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$HADOOP_PREFIX/conf}
|
||||
|
||||
# Extra Java CLASSPATH elements. Optional.
|
||||
# export HADOOP_CLASSPATH="<extra_entries>:$HADOOP_CLASSPATH"
|
||||
|
||||
# The maximum amount of heap to use, in MB. Default is 1000.
|
||||
# export HADOOP_HEAPSIZE=2000
|
||||
|
||||
# Extra Java runtime options. Empty by default.
|
||||
# if [ "$HADOOP_OPTS" == "" ]; then export HADOOP_OPTS=-server; else HADOOP_OPTS+=" -server"; fi
|
||||
|
||||
# Command specific options appended to HADOOP_OPTS when specified
|
||||
export HADOOP_NAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_NAMENODE_OPTS"
|
||||
export HADOOP_SECONDARYNAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_SECONDARYNAMENODE_OPTS"
|
||||
export HADOOP_DATANODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_DATANODE_OPTS"
|
||||
export HADOOP_BALANCER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_BALANCER_OPTS"
|
||||
export HADOOP_JOBTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_JOBTRACKER_OPTS"
|
||||
export HADOOP_TASKTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_TASKTRACKER_OPTS"
|
||||
# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
|
||||
# export HADOOP_CLIENT_OPTS
|
||||
|
||||
# Extra ssh options. Empty by default.
|
||||
# export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
|
||||
|
||||
# File naming remote slave hosts. $HADOOP_PREFIX/conf/slaves by default.
|
||||
export HADOOP_SLAVES=${HADOOP_CONF_DIR}/slaves
|
||||
|
||||
# host:path where hadoop code should be rsync'd from. Unset by default.
|
||||
# export HADOOP_MASTER=master:/home/$USER/src/hadoop
|
||||
|
||||
# Seconds to sleep between slave commands. Unset by default. This
|
||||
# can be useful in large clusters, where, e.g., slave rsyncs can
|
||||
# otherwise arrive faster than the master can service them.
|
||||
# export HADOOP_SLAVE_SLEEP=0.1
|
||||
|
||||
# The directory where pid files are stored. /tmp by default.
|
||||
HADOOP_PID_DIR=${HADOOP_PID_DIR}
|
||||
export HADOOP_PID_DIR=${HADOOP_PID_DIR:-$HADOOP_PREFIX/var/run}
|
||||
|
||||
# A string representing this instance of hadoop. $USER by default.
|
||||
export HADOOP_IDENT_STRING=`whoami`
|
||||
|
||||
# The scheduling priority for daemon processes. See 'man nice'.
|
||||
# export HADOOP_NICENESS=10
|
||||
|
||||
# Allow Hadoop to run with sysctl net.ipv6.bindv6only = 1
|
||||
# export HADOOP_ALLOW_IPV6=yes
|
||||
|
||||
# Where log files are stored. $HADOOP_PREFIX/logs by default.
|
||||
HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$HADOOP_IDENT_STRING
|
||||
export HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-$HADOOP_PREFIX/var/log}
|
@ -1,106 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
|
||||
|
||||
<!-- Put site-specific property overrides in this file. -->
|
||||
|
||||
<configuration>
|
||||
<property>
|
||||
<name>security.client.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for ClientProtocol, which is used by user code
|
||||
via the DistributedFileSystem.
|
||||
The ACL is a comma-separated list of user and group names. The user and
|
||||
group list is separated by a blank. For e.g. "alice,bob users,wheel".
|
||||
A special value of "*" means all users are allowed.</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>security.client.datanode.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for ClientDatanodeProtocol, the client-to-datanode protocol
|
||||
for block recovery.
|
||||
The ACL is a comma-separated list of user and group names. The user and
|
||||
group list is separated by a blank. For e.g. "alice,bob users,wheel".
|
||||
A special value of "*" means all users are allowed.</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>security.datanode.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for DatanodeProtocol, which is used by datanodes to
|
||||
communicate with the namenode.
|
||||
The ACL is a comma-separated list of user and group names. The user and
|
||||
group list is separated by a blank. For e.g. "alice,bob users,wheel".
|
||||
A special value of "*" means all users are allowed.</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>security.inter.datanode.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for InterDatanodeProtocol, the inter-datanode protocol
|
||||
for updating generation timestamp.
|
||||
The ACL is a comma-separated list of user and group names. The user and
|
||||
group list is separated by a blank. For e.g. "alice,bob users,wheel".
|
||||
A special value of "*" means all users are allowed.</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>security.namenode.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for NamenodeProtocol, the protocol used by the secondary
|
||||
namenode to communicate with the namenode.
|
||||
The ACL is a comma-separated list of user and group names. The user and
|
||||
group list is separated by a blank. For e.g. "alice,bob users,wheel".
|
||||
A special value of "*" means all users are allowed.</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>security.inter.tracker.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for InterTrackerProtocol, used by the tasktrackers to
|
||||
communicate with the jobtracker.
|
||||
The ACL is a comma-separated list of user and group names. The user and
|
||||
group list is separated by a blank. For e.g. "alice,bob users,wheel".
|
||||
A special value of "*" means all users are allowed.</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>security.job.submission.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for JobSubmissionProtocol, used by job clients to
|
||||
communciate with the jobtracker for job submission, querying job status etc.
|
||||
The ACL is a comma-separated list of user and group names. The user and
|
||||
group list is separated by a blank. For e.g. "alice,bob users,wheel".
|
||||
A special value of "*" means all users are allowed.</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>security.task.umbilical.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for TaskUmbilicalProtocol, used by the map and reduce
|
||||
tasks to communicate with the parent tasktracker.
|
||||
The ACL is a comma-separated list of user and group names. The user and
|
||||
group list is separated by a blank. For e.g. "alice,bob users,wheel".
|
||||
A special value of "*" means all users are allowed.</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>security.refresh.policy.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for RefreshAuthorizationPolicyProtocol, used by the
|
||||
dfsadmin and mradmin commands to refresh the security policy in-effect.
|
||||
The ACL is a comma-separated list of user and group names. The user and
|
||||
group list is separated by a blank. For e.g. "alice,bob users,wheel".
|
||||
A special value of "*" means all users are allowed.</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>security.admin.operations.protocol.acl</name>
|
||||
<value>*</value>
|
||||
<description>ACL for AdminOperationsProtocol, used by the mradmins commands
|
||||
to refresh queues and nodes at JobTracker. The ACL is a comma-separated list of
|
||||
user and group names. The user and group list is separated by a blank.
|
||||
For e.g. "alice,bob users,wheel". A special value of "*" means all users are
|
||||
allowed.</description>
|
||||
</property>
|
||||
</configuration>
|
@ -1 +0,0 @@
|
||||
localhost
|
@ -1,2 +0,0 @@
|
||||
# Specify multiple slaves, one per each line.
|
||||
localhost
|
331
common/ivy.xml
@ -1,331 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<ivy-module version="1.0">
|
||||
<info organisation="org.apache.hadoop" module="${ant.project.name}" revision="${version}">
|
||||
<license name="Apache 2.0"/>
|
||||
<ivyauthor name="Apache Hadoop Team" url="http://hadoop.apache.org"/>
|
||||
<description>
|
||||
Hadoop Common
|
||||
</description>
|
||||
</info>
|
||||
<configurations defaultconfmapping="default">
|
||||
<!--these match the Maven configurations-->
|
||||
<conf name="default" extends="master,runtime"/>
|
||||
<conf name="master" description="contains the artifact but no dependencies"/>
|
||||
<conf name="runtime" description="runtime but not the artifact"
|
||||
extends="client,server,s3-server,kfs,mandatory,jetty,ftp"/>
|
||||
|
||||
<conf name="mandatory" description="contains the critical dependencies"
|
||||
extends="commons-logging,log4j"/>
|
||||
|
||||
<!--
|
||||
These public configurations contain the core dependencies for running hadoop client or server.
|
||||
The server is effectively a superset of the client.
|
||||
-->
|
||||
<conf name="client" description="client-side dependencies"
|
||||
extends="mandatory,httpclient"/>
|
||||
<conf name="server" description="server-side dependencies"
|
||||
extends="client"/>
|
||||
<conf name="s3-client" description="dependencies for working with S3/EC2 infrastructure"
|
||||
extends="client"/>
|
||||
<conf name="s3-server" description="dependencies for running on S3/EC2 infrastructure"
|
||||
extends="s3-client,server"/>
|
||||
<conf name="kfs" description="dependencies for KFS file system support"/>
|
||||
<conf name="ftp" description="dependencies for workign with FTP filesytems"
|
||||
extends="mandatory"/>
|
||||
<conf name="jetty" description="Jetty provides the in-VM HTTP daemon" extends="commons-logging"/>
|
||||
|
||||
<conf name="common" extends="runtime,mandatory,httpclient,ftp,jetty,jdiff"
|
||||
description="common artifacts"/>
|
||||
<!--Testing pulls in everything-->
|
||||
<conf name="test" extends="master" description="the classpath needed to run tests"/>
|
||||
|
||||
<!--Packaging pulls in everything-->
|
||||
<conf name="package" extends="master" description="the classpath needed for packaging"/>
|
||||
|
||||
<!--Private configurations. -->
|
||||
|
||||
<conf name="javadoc" visibility="private" description="artiracts required while performing doc generation"
|
||||
extends="common,mandatory,jetty,lucene"/>
|
||||
|
||||
<conf name="releaseaudit" visibility="private"
|
||||
description="Artifacts required for releaseaudit target"/>
|
||||
|
||||
<conf name="commons-logging" visibility="private"/>
|
||||
<conf name="httpclient" visibility="private" extends="commons-logging"/>
|
||||
<conf name="log4j" visibility="private"/>
|
||||
<conf name="lucene" visibility="private"/>
|
||||
<conf name="jdiff" visibility="private" extends="log4j,s3-client,jetty,server"/>
|
||||
<conf name="checkstyle" visibility="private"/>
|
||||
|
||||
</configurations>
|
||||
|
||||
<publications>
|
||||
<!--get the artifact from our module name-->
|
||||
<artifact conf="master"/>
|
||||
</publications>
|
||||
<dependencies>
|
||||
|
||||
<!--used client side-->
|
||||
<dependency org="commons-cli"
|
||||
name="commons-cli"
|
||||
rev="${commons-cli.version}"
|
||||
conf="client->default"/>
|
||||
<dependency org="checkstyle"
|
||||
name="checkstyle"
|
||||
rev="${checkstyle.version}"
|
||||
conf="checkstyle->default"/>
|
||||
<dependency org="jdiff"
|
||||
name="jdiff"
|
||||
rev="${jdiff.version}"
|
||||
conf="jdiff->default"/>
|
||||
|
||||
<dependency org="xmlenc"
|
||||
name="xmlenc"
|
||||
rev="${xmlenc.version}"
|
||||
conf="server->default"/>
|
||||
|
||||
<!--Configuration: httpclient-->
|
||||
|
||||
<!--
|
||||
commons-httpclient asks for too many files.
|
||||
All it needs is commons-codec and commons-logging JARs
|
||||
-->
|
||||
<dependency org="commons-httpclient"
|
||||
name="commons-httpclient"
|
||||
rev="${commons-httpclient.version}"
|
||||
conf="httpclient->master">
|
||||
</dependency>
|
||||
|
||||
<dependency org="commons-codec"
|
||||
name="commons-codec"
|
||||
rev="${commons-codec.version}"
|
||||
conf="httpclient->default"/>
|
||||
|
||||
<dependency org="commons-net"
|
||||
name="commons-net"
|
||||
rev="${commons-net.version}"
|
||||
conf="ftp->default"/>
|
||||
|
||||
<!--Configuration: Jetty -->
|
||||
|
||||
<!-- <dependency org="javax.servlet"
|
||||
name="servlet-api"
|
||||
rev="${servlet-api.version}"
|
||||
conf="jetty->master"/> -->
|
||||
<dependency org="org.mortbay.jetty"
|
||||
name="jetty"
|
||||
rev="${jetty.version}"
|
||||
conf="jetty->master"/>
|
||||
<dependency org="org.mortbay.jetty"
|
||||
name="jetty-util"
|
||||
rev="${jetty-util.version}"
|
||||
conf="jetty->master"/>
|
||||
|
||||
<dependency org="tomcat"
|
||||
name="jasper-runtime"
|
||||
rev="${jasper.version}"
|
||||
conf="jetty->master"/>
|
||||
<dependency org="tomcat"
|
||||
name="jasper-compiler"
|
||||
rev="${jasper.version}"
|
||||
conf="jetty->master"/>
|
||||
<dependency org="org.mortbay.jetty"
|
||||
name="jsp-api-2.1"
|
||||
rev="${jetty.version}"
|
||||
conf="jetty->master"/>
|
||||
<dependency org="org.mortbay.jetty"
|
||||
name="jsp-2.1"
|
||||
rev="${jetty.version}"
|
||||
conf="jetty->master"/>
|
||||
<dependency org="commons-el"
|
||||
name="commons-el"
|
||||
rev="${commons-el.version}"
|
||||
conf="jetty->master"/>
|
||||
|
||||
|
||||
<!--Configuration: commons-logging -->
|
||||
|
||||
<!--it is essential that only the master JAR of commons logging
|
||||
is pulled in, as its dependencies are usually a mess, including things
|
||||
like out of date servlet APIs, bits of Avalon, etc.
|
||||
-->
|
||||
<dependency org="commons-logging"
|
||||
name="commons-logging"
|
||||
rev="${commons-logging.version}"
|
||||
conf="commons-logging->master"/>
|
||||
|
||||
|
||||
<!--Configuration: commons-logging -->
|
||||
|
||||
<!--log4J is not optional until commons-logging.properties is stripped out of the JAR -->
|
||||
<dependency org="log4j"
|
||||
name="log4j"
|
||||
rev="${log4j.version}"
|
||||
conf="log4j->master"/>
|
||||
|
||||
<!--Configuration: s3-client -->
|
||||
<!--there are two jets3t projects in the repository; this one goes up to 0.6 and
|
||||
is assumed to be the live one-->
|
||||
<dependency org="net.java.dev.jets3t"
|
||||
name="jets3t"
|
||||
rev="${jets3t.version}"
|
||||
conf="s3-client->master"/>
|
||||
<dependency org="commons-net"
|
||||
name="commons-net"
|
||||
rev="${commons-net.version}"
|
||||
conf="s3-client->master"/>
|
||||
<dependency org="org.mortbay.jetty"
|
||||
name="servlet-api-2.5"
|
||||
rev="${servlet-api-2.5.version}"
|
||||
conf="s3-client->master"/>
|
||||
<dependency org="net.sf.kosmosfs"
|
||||
name="kfs"
|
||||
rev="${kfs.version}"
|
||||
conf="kfs->default"/>
|
||||
|
||||
<!--Configuration: test -->
|
||||
<!--artifacts needed for testing -->
|
||||
|
||||
<dependency org="org.apache.ftpserver"
|
||||
name="ftplet-api"
|
||||
rev="${ftplet-api.version}"
|
||||
conf="test->default"/>
|
||||
<dependency org="org.apache.mina"
|
||||
name="mina-core"
|
||||
rev="${mina-core.version}"
|
||||
conf="test->default"/>
|
||||
<dependency org="org.apache.ftpserver"
|
||||
name="ftpserver-core"
|
||||
rev="${ftpserver-core.version}"
|
||||
conf="test->default"/>
|
||||
<dependency org="org.apache.ftpserver"
|
||||
name="ftpserver-deprecated"
|
||||
rev="${ftpserver-deprecated.version}"
|
||||
conf="test->default"/>
|
||||
|
||||
<dependency org="junit"
|
||||
name="junit"
|
||||
rev="${junit.version}"
|
||||
conf="test->default"/>
|
||||
<dependency org="org.apache.rat"
|
||||
name="apache-rat-tasks"
|
||||
rev="${rats-lib.version}"
|
||||
conf="releaseaudit->default"/>
|
||||
<dependency org="commons-lang"
|
||||
name="commons-lang"
|
||||
rev="${commons-lang.version}"
|
||||
conf="releaseaudit->default"/>
|
||||
<dependency org="commons-collections"
|
||||
name="commons-collections"
|
||||
rev="${commons-collections.version}"
|
||||
conf="releaseaudit->default"/>
|
||||
<dependency org="hsqldb"
|
||||
name="hsqldb"
|
||||
rev="${hsqldb.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="org.apache.lucene"
|
||||
name="lucene-core"
|
||||
rev="${lucene-core.version}"
|
||||
conf="javadoc->default"/>
|
||||
<dependency org="commons-logging"
|
||||
name="commons-logging-api"
|
||||
rev="${commons-logging-api.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="org.slf4j"
|
||||
name="slf4j-api"
|
||||
rev="${slf4j-api.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="org.eclipse.jdt"
|
||||
name="core"
|
||||
rev="${core.version}"
|
||||
conf="common->master"/>
|
||||
<dependency org="oro"
|
||||
name="oro"
|
||||
rev="${oro.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="org.slf4j"
|
||||
name="slf4j-log4j12"
|
||||
rev="${slf4j-log4j12.version}"
|
||||
conf="common->master">
|
||||
</dependency>
|
||||
<dependency org="org.apache.hadoop"
|
||||
name="avro"
|
||||
rev="${avro.version}"
|
||||
conf="common->default">
|
||||
<exclude module="ant"/>
|
||||
<exclude module="jetty"/>
|
||||
<exclude module="slf4j-simple"/>
|
||||
</dependency>
|
||||
<dependency org="org.codehaus.jackson"
|
||||
name="jackson-mapper-asl"
|
||||
rev="${jackson.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="com.thoughtworks.paranamer"
|
||||
name="paranamer"
|
||||
rev="${paranamer.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="com.thoughtworks.paranamer"
|
||||
name="paranamer-ant"
|
||||
rev="${paranamer.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="org.aspectj"
|
||||
name="aspectjrt"
|
||||
rev="${aspectj.version}"
|
||||
conf="common->default">
|
||||
</dependency>
|
||||
<dependency org="org.aspectj"
|
||||
name="aspectjtools"
|
||||
rev="${aspectj.version}"
|
||||
conf="common->default">
|
||||
</dependency>
|
||||
<dependency org="org.mockito"
|
||||
name="mockito-all"
|
||||
rev="${mockito-all.version}"
|
||||
conf="test->default">
|
||||
</dependency>
|
||||
<dependency org="com.jcraft"
|
||||
name="jsch"
|
||||
rev="${jsch.version}"
|
||||
conf="common->default">
|
||||
</dependency>
|
||||
<!--Configuration: package -->
|
||||
<!--artifacts needed for packaging -->
|
||||
<dependency org="org.vafer"
|
||||
name="jdeb"
|
||||
rev="${jdeb.version}"
|
||||
conf="package->master">
|
||||
</dependency>
|
||||
<dependency org="commons-configuration"
|
||||
name="commons-configuration"
|
||||
rev="${commons-configuration.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="org.apache.commons"
|
||||
name="commons-math"
|
||||
rev="${commons-math.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="com.google.guava"
|
||||
name="guava"
|
||||
rev="${guava.version}"
|
||||
conf="common->default"/>
|
||||
<dependency org="com.google.protobuf"
|
||||
name="protobuf-java"
|
||||
rev="${protobuf.version}"
|
||||
conf="common->default"/>
|
||||
</dependencies>
|
||||
</ivy-module>
|
@ -1,139 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-common-instrumented</artifactId>
|
||||
<packaging>jar</packaging>
|
||||
<version>@version</version>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
<version>1.2</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>xmlenc</groupId>
|
||||
<artifactId>xmlenc</artifactId>
|
||||
<version>0.52</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-httpclient</groupId>
|
||||
<artifactId>commons-httpclient</artifactId>
|
||||
<version>3.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
<version>1.4</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-net</groupId>
|
||||
<artifactId>commons-net</artifactId>
|
||||
<version>1.4.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jetty</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>tomcat</groupId>
|
||||
<artifactId>jasper-runtime</artifactId>
|
||||
<version>5.5.12</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>tomcat</groupId>
|
||||
<artifactId>jasper-compiler</artifactId>
|
||||
<version>5.5.12</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jsp-api-2.1</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jsp-2.1</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-el</groupId>
|
||||
<artifactId>commons-el</artifactId>
|
||||
<version>1.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>net.java.dev.jets3t</groupId>
|
||||
<artifactId>jets3t</artifactId>
|
||||
<version>0.7.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-net</groupId>
|
||||
<artifactId>commons-net</artifactId>
|
||||
<version>1.4.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>servlet-api-2.5</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>net.sf.kosmosfs</groupId>
|
||||
<artifactId>kfs</artifactId>
|
||||
<version>0.3</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.8.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>hsqldb</groupId>
|
||||
<artifactId>hsqldb</artifactId>
|
||||
<version>1.8.0.10</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>oro</groupId>
|
||||
<artifactId>oro</artifactId>
|
||||
<version>2.0.8</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>avro</artifactId>
|
||||
<version>1.3.2</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<!-- Don't pull in Avro's (later) version of Jetty.-->
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jetty</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<!-- Exclude Avro's version of ant since it conflicts with Jetty's.-->
|
||||
<groupId>org.apache.ant</groupId>
|
||||
<artifactId>ant</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
@ -1,159 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-common</artifactId>
|
||||
<packaging>jar</packaging>
|
||||
<version>@version</version>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
<version>1.2</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>xmlenc</groupId>
|
||||
<artifactId>xmlenc</artifactId>
|
||||
<version>0.52</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-httpclient</groupId>
|
||||
<artifactId>commons-httpclient</artifactId>
|
||||
<version>3.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
<version>1.4</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-net</groupId>
|
||||
<artifactId>commons-net</artifactId>
|
||||
<version>1.4.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jetty</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>tomcat</groupId>
|
||||
<artifactId>jasper-runtime</artifactId>
|
||||
<version>5.5.12</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>tomcat</groupId>
|
||||
<artifactId>jasper-compiler</artifactId>
|
||||
<version>5.5.12</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jsp-api-2.1</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jsp-2.1</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-el</groupId>
|
||||
<artifactId>commons-el</artifactId>
|
||||
<version>1.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>net.java.dev.jets3t</groupId>
|
||||
<artifactId>jets3t</artifactId>
|
||||
<version>0.7.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-net</groupId>
|
||||
<artifactId>commons-net</artifactId>
|
||||
<version>1.4.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>servlet-api-2.5</artifactId>
|
||||
<version>6.1.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>net.sf.kosmosfs</groupId>
|
||||
<artifactId>kfs</artifactId>
|
||||
<version>0.3</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.8.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>hsqldb</groupId>
|
||||
<artifactId>hsqldb</artifactId>
|
||||
<version>1.8.0.10</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>oro</groupId>
|
||||
<artifactId>oro</artifactId>
|
||||
<version>2.0.8</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>avro</artifactId>
|
||||
<version>1.3.2</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<!-- Don't pull in Avro's (later) version of Jetty.-->
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jetty</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<!-- Exclude Avro's version of ant since it conflicts with Jetty's.-->
|
||||
<groupId>org.apache.ant</groupId>
|
||||
<artifactId>ant</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-configuration</groupId>
|
||||
<artifactId>commons-configuration</artifactId>
|
||||
<version>1.6</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-math</artifactId>
|
||||
<version>2.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<version>r09</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
<version>2.4.0a</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
@ -1,58 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-common-test</artifactId>
|
||||
<packaging>jar</packaging>
|
||||
<version>@version</version>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-common</artifactId>
|
||||
<version>@version</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.ftpserver</groupId>
|
||||
<artifactId>ftplet-api</artifactId>
|
||||
<version>1.0.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.mina</groupId>
|
||||
<artifactId>mina-core</artifactId>
|
||||
<version>2.0.0-M5</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.ftpserver</groupId>
|
||||
<artifactId>ftpserver-core</artifactId>
|
||||
<version>1.0.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.ftpserver</groupId>
|
||||
<artifactId>ftpserver-deprecated</artifactId>
|
||||
<version>1.0.0-M2</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<version>1.8.5</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
@ -1,50 +0,0 @@
|
||||
<ivysettings>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<property name="repo.maven.org" value="http://repo1.maven.org/maven2/" override="false"/>
|
||||
|
||||
<property name="maven2.pattern" value="[organisation]/[module]/[revision]/[module]-[revision]"/>
|
||||
<property name="repo.dir" value="${user.home}/.m2/repository"/>
|
||||
<!-- pull in the local repository -->
|
||||
<include url="${ivy.default.conf.dir}/ivyconf-local.xml"/>
|
||||
|
||||
<property name="resolvers" value="default" override="false"/>
|
||||
<property name="force-resolve" value="false" override="false"/>
|
||||
<settings defaultResolver="${resolvers}"/>
|
||||
|
||||
<resolvers>
|
||||
<!--ibiblio resolvers-->
|
||||
<ibiblio name="maven2" root="${repo.maven.org}" m2compatible="true"/>
|
||||
|
||||
<filesystem name="fs" m2compatible="true" force="${force-resolve}">
|
||||
<artifact pattern="${repo.dir}/${maven2.pattern}.[ext]"/>
|
||||
<ivy pattern="${repo.dir}/${maven2.pattern}.pom"/>
|
||||
</filesystem>
|
||||
|
||||
<chain name="default" dual="true">
|
||||
<resolver ref="maven2"/>
|
||||
</chain>
|
||||
|
||||
<chain name="internal" dual="true">
|
||||
<resolver ref="fs"/>
|
||||
<resolver ref="maven2"/>
|
||||
</chain>
|
||||
|
||||
</resolvers>
|
||||
|
||||
</ivysettings>
|
@ -1,90 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
#This properties file lists the versions of the various artifacts used by hadoop and components.
|
||||
#It drives ivy and the generation of a maven POM
|
||||
|
||||
#These are the versions of our dependencies (in alphabetical order)
|
||||
ant-task.version=2.0.10
|
||||
|
||||
avro.version=1.3.2
|
||||
|
||||
checkstyle.version=4.2
|
||||
|
||||
commons-cli.version=1.2
|
||||
commons-cli2.version=2.0-mahout
|
||||
commons-codec.version=1.4
|
||||
commons-collections.version=3.1
|
||||
commons-configuration.version=1.6
|
||||
commons-httpclient.version=3.1
|
||||
commons-lang.version=2.5
|
||||
commons-logging.version=1.1.1
|
||||
commons-logging-api.version=1.1
|
||||
commons-el.version=1.0
|
||||
commons-fileupload.version=1.2
|
||||
commons-io.version=1.4
|
||||
commons-math.version=2.1
|
||||
commons-net.version=1.4.1
|
||||
core.version=3.1.1
|
||||
coreplugin.version=1.3.2
|
||||
|
||||
ftplet-api.version=1.0.0
|
||||
ftpserver-core.version=1.0.0
|
||||
ftpserver-deprecated.version=1.0.0-M2
|
||||
|
||||
guava.version=r09
|
||||
|
||||
hsqldb.version=1.8.0.10
|
||||
|
||||
ivy.version=2.1.0
|
||||
|
||||
jasper.version=5.5.12
|
||||
jdeb.version=0.8
|
||||
jsp.version=2.1
|
||||
jsp-api.version=5.5.12
|
||||
jets3t.version=0.7.1
|
||||
jetty.version=6.1.14
|
||||
jetty-util.version=6.1.14
|
||||
junit.version=4.8.1
|
||||
jdiff.version=1.0.9
|
||||
json.version=1.0
|
||||
|
||||
kfs.version=0.3
|
||||
|
||||
log4j.version=1.2.15
|
||||
lucene-core.version=2.3.1
|
||||
|
||||
mina-core.version=2.0.0-M5
|
||||
|
||||
oro.version=2.0.8
|
||||
|
||||
protobuf.version=2.4.0a
|
||||
|
||||
rats-lib.version=0.6
|
||||
|
||||
servlet.version=4.0.6
|
||||
servlet-api-2.5.version=6.1.14
|
||||
servlet-api.version=2.5
|
||||
slf4j-api.version=1.5.11
|
||||
slf4j-log4j12.version=1.5.11
|
||||
|
||||
wagon-http.version=1.0-beta-2
|
||||
|
||||
xmlenc.version=0.52
|
||||
xerces.version=1.4.4
|
||||
|
||||
aspectj.version=1.6.5
|
||||
|
||||
mockito-all.version=1.8.5
|
||||
|
||||
jsch.version=0.1.42
|
||||
|
@ -1,86 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
# This file is used to fix the paths in CNDOCS_SRC/uming.conf, CNDOCS_SRC/src/documentation/sitemap.xmap
|
||||
|
||||
CNDOCS_SRC=$1
|
||||
|
||||
cat <<EOF > src/docs/cn/uming.conf
|
||||
<?xml version="1.0"?>
|
||||
<configuration>
|
||||
<fonts>
|
||||
<font metrics-file="$CNDOCS_SRC/uming.xml" kerning="yes" embed-file="$CNDOCS_SRC/uming.ttc">
|
||||
<font-triplet name="AR PL UMing" style="normal" weight="normal"/>
|
||||
<font-triplet name="AR PL UMing" style="italic" weight="normal"/>
|
||||
<font-triplet name="AR PL UMing" style="normal" weight="bold"/>
|
||||
<font-triplet name="AR PL UMing" style="italic" weight="bold"/>
|
||||
</font>
|
||||
</fonts>
|
||||
</configuration>
|
||||
EOF
|
||||
|
||||
cat <<EOF > src/docs/cn/src/documentation/sitemap.xmap
|
||||
<?xml version="1.0"?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<map:sitemap xmlns:map="http://apache.org/cocoon/sitemap/1.0">
|
||||
<map:components>
|
||||
<map:serializers default="fo2pdf">
|
||||
<map:serializer name="fo2pdf"
|
||||
src="org.apache.cocoon.serialization.FOPSerializer"
|
||||
mime-type="application/pdf">
|
||||
<user-config src="$CNDOCS_SRC/uming.conf"/>
|
||||
</map:serializer>
|
||||
</map:serializers>
|
||||
</map:components>
|
||||
<map:pipelines>
|
||||
<map:pipeline>
|
||||
<!-- generate .pdf files from .fo -->
|
||||
<map:match type="regexp" pattern="^(.*?)([^/]*).pdf$">
|
||||
<map:select type="exists">
|
||||
<map:when test="{lm:project.{1}{2}.pdf}">
|
||||
<map:read src="{lm:project.{1}{2}.pdf}"/>
|
||||
</map:when>
|
||||
<map:when test="{lm:project.{1}{2}.fo}">
|
||||
<map:generate src="{lm:project.{1}{2}.fo}"/>
|
||||
<map:serialize type="fo2pdf"/>
|
||||
</map:when>
|
||||
<map:otherwise>
|
||||
<map:generate src="cocoon://{1}{2}.fo"/>
|
||||
<map:serialize type="fo2pdf"/>
|
||||
</map:otherwise>
|
||||
</map:select>
|
||||
</map:match>
|
||||
</map:pipeline>
|
||||
</map:pipelines>
|
||||
</map:sitemap>
|
||||
EOF
|
@ -1,78 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
# packageNativeHadoop.sh - A simple script to help package native-hadoop libraries
|
||||
|
||||
#
|
||||
# Note:
|
||||
# This script relies on the following environment variables to function correctly:
|
||||
# * BASE_NATIVE_LIB_DIR
|
||||
# * BUILD_NATIVE_DIR
|
||||
# * DIST_LIB_DIR
|
||||
# All these are setup by build.xml.
|
||||
#
|
||||
|
||||
TAR='tar cf -'
|
||||
UNTAR='tar xfBp -'
|
||||
|
||||
# Copy the pre-built libraries in $BASE_NATIVE_LIB_DIR
|
||||
if [ -d $BASE_NATIVE_LIB_DIR ]
|
||||
then
|
||||
for platform in `ls $BASE_NATIVE_LIB_DIR`
|
||||
do
|
||||
if [ ! -d $DIST_LIB_DIR ]
|
||||
then
|
||||
mkdir -p $DIST_LIB_DIR
|
||||
echo "Created $DIST_LIB_DIR"
|
||||
fi
|
||||
echo "Copying libraries in $BASE_NATIVE_LIB_DIR/$platform to $DIST_LIB_DIR/"
|
||||
cd $BASE_NATIVE_LIB_DIR/
|
||||
$TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
|
||||
done
|
||||
fi
|
||||
|
||||
# Copy the custom-built libraries in $BUILD_DIR
|
||||
if [ -d $BUILD_NATIVE_DIR ]
|
||||
then
|
||||
for platform in `ls $BUILD_NATIVE_DIR`
|
||||
do
|
||||
if [ ! -d $DIST_LIB_DIR ]
|
||||
then
|
||||
mkdir -p $DIST_LIB_DIR
|
||||
echo "Created $DIST_LIB_DIR"
|
||||
fi
|
||||
echo "Copying libraries in $BUILD_NATIVE_DIR/$platform/lib to $DIST_LIB_DIR/"
|
||||
cd $BUILD_NATIVE_DIR/$platform/lib
|
||||
$TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
|
||||
done
|
||||
fi
|
||||
|
||||
if [ "${BUNDLE_SNAPPY_LIB}" = "true" ]
|
||||
then
|
||||
if [ -d ${SNAPPY_LIB_DIR} ]
|
||||
then
|
||||
echo "Copying Snappy library in ${SNAPPY_LIB_DIR} to $DIST_LIB_DIR/"
|
||||
cd ${SNAPPY_LIB_DIR}
|
||||
$TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
|
||||
else
|
||||
echo "Snappy lib directory ${SNAPPY_LIB_DIR} does not exist"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
#vim: ts=2: sw=2: et
|
@ -1,11 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
|
||||
|
||||
<!-- Put site-specific property overrides in this file. -->
|
||||
|
||||
<configuration>
|
||||
<property>
|
||||
<name>fs.default.name</name>
|
||||
<value>${HADOOP_NN_HOST}</value>
|
||||
</property>
|
||||
</configuration>
|
72
dev-support/smart-apply-patch.sh
Executable file
@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env bash
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
set -e
|
||||
|
||||
PATCH_FILE=$1
|
||||
if [ -z "$PATCH_FILE" ]; then
|
||||
echo usage: $0 patch-file
|
||||
exit 1
|
||||
fi
|
||||
|
||||
PATCH=${PATCH:-patch} # allow overriding patch binary
|
||||
|
||||
# Cleanup handler for temporary files
|
||||
TOCLEAN=""
|
||||
cleanup() {
|
||||
rm $TOCLEAN
|
||||
exit $1
|
||||
}
|
||||
trap "cleanup 1" HUP INT QUIT TERM
|
||||
|
||||
# Allow passing "-" for stdin patches
|
||||
if [ "$PATCH_FILE" == "-" ]; then
|
||||
PATCH_FILE=/tmp/tmp.in.$$
|
||||
cat /dev/fd/0 > $PATCH_FILE
|
||||
TOCLEAN="$TOCLEAN $PATCH_FILE"
|
||||
fi
|
||||
|
||||
# Come up with a list of changed files into $TMP
|
||||
TMP=/tmp/tmp.paths.$$
|
||||
TOCLEAN="$TOCLEAN $TMP"
|
||||
grep '^+++\|^---' $PATCH_FILE | cut -c '5-' | grep -v /dev/null | sort | uniq > $TMP
|
||||
|
||||
# Assume p0 to start
|
||||
PLEVEL=0
|
||||
|
||||
# if all of the lines start with a/ or b/, then this is a git patch that
|
||||
# was generated without --no-prefix
|
||||
if ! grep -qv '^a/\|^b/' $TMP ; then
|
||||
echo Looks like this is a git patch. Stripping a/ and b/ prefixes
|
||||
echo and incrementing PLEVEL
|
||||
PLEVEL=$[$PLEVEL + 1]
|
||||
sed -i -e 's,^[ab]/,,' $TMP
|
||||
fi
|
||||
|
||||
# if all of the lines start with common/, hdfs/, or mapreduce/, this is
|
||||
# relative to the hadoop root instead of the subproject root, so we need
|
||||
# to chop off another layer
|
||||
PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
|
||||
if [[ "$PREFIX_DIRS" =~ ^(hdfs|common|mapreduce)$ ]]; then
|
||||
|
||||
echo Looks like this is relative to project root. Increasing PLEVEL
|
||||
PLEVEL=$[$PLEVEL + 1]
|
||||
elif ! echo "$PREFIX_DIRS" | grep -vxq 'common\|hdfs\|mapreduce' ; then
|
||||
echo Looks like this is a cross-subproject patch. Not supported!
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo Going to apply patch with: $PATCH -p$PLEVEL
|
||||
$PATCH -p$PLEVEL -E < $PATCH_FILE
|
||||
|
||||
cleanup 0
|
@ -13,36 +13,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
*~
|
||||
.classpath
|
||||
.project
|
||||
.settings
|
||||
*.iml
|
||||
*.ipr
|
||||
*.iws
|
||||
.idea
|
||||
.svn
|
||||
build/
|
||||
build-fi/
|
||||
build.properties
|
||||
conf/masters
|
||||
conf/slaves
|
||||
conf/hadoop-env.sh
|
||||
conf/hadoop-site.xml
|
||||
conf/core-site.xml
|
||||
conf/mapred-site.xml
|
||||
conf/hdfs-site.xml
|
||||
conf/hadoop-policy.xml
|
||||
conf/capacity-scheduler.xml
|
||||
conf/mapred-queue-acls.xml
|
||||
docs/api/
|
||||
ivy/hadoop-core.xml
|
||||
ivy/hadoop-core-test.xml
|
||||
ivy/ivy-*.jar
|
||||
ivy/maven-ant-tasks-*.jar
|
||||
logs/
|
||||
src/contrib/ec2/bin/hadoop-ec2-env.sh
|
||||
src/docs/build
|
||||
src/docs/cn/build
|
||||
src/docs/cn/src/documentation/sitemap.xmap
|
||||
src/docs/cn/uming.conf
|
||||
OK_RELEASEAUDIT_WARNINGS=0
|
||||
OK_FINDBUGS_WARNINGS=0
|
||||
OK_JAVADOC_WARNINGS=0
|
715
dev-support/test-patch.sh
Executable file
@ -0,0 +1,715 @@
|
||||
#!/usr/bin/env bash
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
#set -x
|
||||
ulimit -n 1024
|
||||
|
||||
### Setup some variables.
|
||||
### SVN_REVISION and BUILD_URL are set by Hudson if it is run by patch process
|
||||
### Read variables from properties file
|
||||
bindir=$(dirname $0)
|
||||
. $bindir/test-patch.properties
|
||||
|
||||
###############################################################################
|
||||
parseArgs() {
|
||||
case "$1" in
|
||||
HUDSON)
|
||||
### Set HUDSON to true to indicate that this script is being run by Hudson
|
||||
HUDSON=true
|
||||
if [[ $# != 16 ]] ; then
|
||||
echo "ERROR: usage $0 HUDSON <PATCH_DIR> <SUPPORT_DIR> <PS_CMD> <WGET_CMD> <JIRACLI> <SVN_CMD> <GREP_CMD> <PATCH_CMD> <FINDBUGS_HOME> <FORREST_HOME> <ECLIPSE_HOME> <WORKSPACE_BASEDIR> <JIRA_PASSWD> <CURL_CMD> <DEFECT> "
|
||||
cleanupAndExit 0
|
||||
fi
|
||||
PATCH_DIR=$2
|
||||
SUPPORT_DIR=$3
|
||||
PS=$4
|
||||
WGET=$5
|
||||
JIRACLI=$6
|
||||
SVN=$7
|
||||
GREP=$8
|
||||
PATCH=$9
|
||||
FINDBUGS_HOME=${10}
|
||||
FORREST_HOME=${11}
|
||||
ECLIPSE_HOME=${12}
|
||||
BASEDIR=${13}
|
||||
JIRA_PASSWD=${14}
|
||||
CURL=${15}
|
||||
defect=${16}
|
||||
|
||||
### Retrieve the defect number
|
||||
if [ -z "$defect" ] ; then
|
||||
echo "Could not determine the patch to test. Exiting."
|
||||
cleanupAndExit 0
|
||||
fi
|
||||
|
||||
if [ ! -e "$PATCH_DIR" ] ; then
|
||||
mkdir -p $PATCH_DIR
|
||||
fi
|
||||
|
||||
ECLIPSE_PROPERTY="-Declipse.home=$ECLIPSE_HOME"
|
||||
;;
|
||||
DEVELOPER)
|
||||
### Set HUDSON to false to indicate that this script is being run by a developer
|
||||
HUDSON=false
|
||||
if [[ $# != 9 ]] ; then
|
||||
echo "ERROR: usage $0 DEVELOPER <PATCH_FILE> <SCRATCH_DIR> <SVN_CMD> <GREP_CMD> <PATCH_CMD> <FINDBUGS_HOME> <FORREST_HOME> <WORKSPACE_BASEDIR>"
|
||||
cleanupAndExit 0
|
||||
fi
|
||||
### PATCH_FILE contains the location of the patchfile
|
||||
PATCH_FILE=$2
|
||||
if [[ ! -e "$PATCH_FILE" ]] ; then
|
||||
echo "Unable to locate the patch file $PATCH_FILE"
|
||||
cleanupAndExit 0
|
||||
fi
|
||||
PATCH_DIR=$3
|
||||
### Check if $PATCH_DIR exists. If it does not exist, create a new directory
|
||||
if [[ ! -e "$PATCH_DIR" ]] ; then
|
||||
mkdir "$PATCH_DIR"
|
||||
if [[ $? == 0 ]] ; then
|
||||
echo "$PATCH_DIR has been created"
|
||||
else
|
||||
echo "Unable to create $PATCH_DIR"
|
||||
cleanupAndExit 0
|
||||
fi
|
||||
fi
|
||||
SVN=$4
|
||||
GREP=$5
|
||||
PATCH=$6
|
||||
FINDBUGS_HOME=$7
|
||||
FORREST_HOME=$8
|
||||
BASEDIR=$9
|
||||
### Obtain the patch filename to append it to the version number
|
||||
defect=`basename $PATCH_FILE`
|
||||
;;
|
||||
*)
|
||||
echo "ERROR: usage $0 HUDSON [args] | DEVELOPER [args]"
|
||||
cleanupAndExit 0
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
###############################################################################
|
||||
checkout () {
|
||||
echo ""
|
||||
echo ""
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo " Testing patch for ${defect}."
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo ""
|
||||
echo ""
|
||||
### When run by a developer, if the workspace contains modifications, do not continue
|
||||
status=`$SVN stat --ignore-externals | sed -e '/^X[ ]*/D'`
|
||||
if [[ $HUDSON == "false" ]] ; then
|
||||
if [[ "$status" != "" ]] ; then
|
||||
echo "ERROR: can't run in a workspace that contains the following modifications"
|
||||
echo "$status"
|
||||
cleanupAndExit 1
|
||||
fi
|
||||
echo
|
||||
else
|
||||
cd $BASEDIR
|
||||
$SVN revert -R .
|
||||
rm -rf `$SVN status --no-ignore`
|
||||
$SVN update
|
||||
fi
|
||||
return $?
|
||||
}
|
||||
|
||||
###############################################################################
|
||||
setup () {
|
||||
### Download latest patch file (ignoring .htm and .html) when run from patch process
|
||||
if [[ $HUDSON == "true" ]] ; then
|
||||
$WGET -q -O $PATCH_DIR/jira http://issues.apache.org/jira/browse/$defect
|
||||
if [[ `$GREP -c 'Patch Available' $PATCH_DIR/jira` == 0 ]] ; then
|
||||
echo "$defect is not \"Patch Available\". Exiting."
|
||||
cleanupAndExit 0
|
||||
fi
|
||||
relativePatchURL=`$GREP -o '"/jira/secure/attachment/[0-9]*/[^"]*' $PATCH_DIR/jira | $GREP -v -e 'htm[l]*$' | sort | tail -1 | $GREP -o '/jira/secure/attachment/[0-9]*/[^"]*'`
|
||||
patchURL="http://issues.apache.org${relativePatchURL}"
|
||||
patchNum=`echo $patchURL | $GREP -o '[0-9]*/' | $GREP -o '[0-9]*'`
|
||||
echo "$defect patch is being downloaded at `date` from"
|
||||
echo "$patchURL"
|
||||
$WGET -q -O $PATCH_DIR/patch $patchURL
|
||||
VERSION=${SVN_REVISION}_${defect}_PATCH-${patchNum}
|
||||
JIRA_COMMENT="Here are the results of testing the latest attachment
|
||||
$patchURL
|
||||
against trunk revision ${SVN_REVISION}."
|
||||
|
||||
### Copy in any supporting files needed by this process
|
||||
cp -r $SUPPORT_DIR/lib/* ./lib
|
||||
#PENDING: cp -f $SUPPORT_DIR/etc/checkstyle* ./src/test
|
||||
### Copy the patch file to $PATCH_DIR
|
||||
else
|
||||
VERSION=PATCH-${defect}
|
||||
cp $PATCH_FILE $PATCH_DIR/patch
|
||||
if [[ $? == 0 ]] ; then
|
||||
echo "Patch file $PATCH_FILE copied to $PATCH_DIR"
|
||||
else
|
||||
echo "Could not copy $PATCH_FILE to $PATCH_DIR"
|
||||
cleanupAndExit 0
|
||||
fi
|
||||
fi
|
||||
### exit if warnings are NOT defined in the properties file
|
||||
if [ -z "$OK_FINDBUGS_WARNINGS" ] || [[ -z "$OK_JAVADOC_WARNINGS" ]] || [[ -z $OK_RELEASEAUDIT_WARNINGS ]]; then
|
||||
echo "Please define the following properties in test-patch.properties file"
|
||||
echo "OK_FINDBUGS_WARNINGS"
|
||||
echo "OK_RELEASEAUDIT_WARNINGS"
|
||||
echo "OK_JAVADOC_WARNINGS"
|
||||
cleanupAndExit 1
|
||||
fi
|
||||
echo ""
|
||||
echo ""
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo " Pre-build trunk to verify trunk stability and javac warnings"
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo ""
|
||||
echo ""
|
||||
# echo "$ANT_HOME/bin/ant -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
|
||||
# $ANT_HOME/bin/ant -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
|
||||
$MAVEN_HOME/bin/mvn clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
|
||||
if [[ $? != 0 ]] ; then
|
||||
echo "Trunk compilation is broken?"
|
||||
cleanupAndExit 1
|
||||
fi
|
||||
}
|
||||
|
||||
###############################################################################
|
||||
### Check for @author tags in the patch
|
||||
checkAuthor () {
|
||||
echo ""
|
||||
echo ""
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo " Checking there are no @author tags in the patch."
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo ""
|
||||
echo ""
|
||||
authorTags=`$GREP -c -i '@author' $PATCH_DIR/patch`
|
||||
echo "There appear to be $authorTags @author tags in the patch."
|
||||
if [[ $authorTags != 0 ]] ; then
|
||||
JIRA_COMMENT="$JIRA_COMMENT
|
||||
|
||||
-1 @author. The patch appears to contain $authorTags @author tags which the Hadoop community has agreed to not allow in code contributions."
|
||||
return 1
|
||||
fi
|
||||
JIRA_COMMENT="$JIRA_COMMENT
|
||||
|
||||
+1 @author. The patch does not contain any @author tags."
|
||||
return 0
|
||||
}
|
||||
|
||||
###############################################################################
|
||||
### Check for tests in the patch
|
||||
checkTests () {
|
||||
echo ""
|
||||
echo ""
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo " Checking there are new or changed tests in the patch."
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo ""
|
||||
echo ""
|
||||
testReferences=`$GREP -c -i '/test' $PATCH_DIR/patch`
|
||||
echo "There appear to be $testReferences test files referenced in the patch."
|
||||
if [[ $testReferences == 0 ]] ; then
|
||||
if [[ $HUDSON == "true" ]] ; then
|
||||
patchIsDoc=`$GREP -c -i 'title="documentation' $PATCH_DIR/jira`
|
||||
if [[ $patchIsDoc != 0 ]] ; then
|
||||
echo "The patch appears to be a documentation patch that doesn't require tests."
|
||||
JIRA_COMMENT="$JIRA_COMMENT
|
||||
|
||||
+0 tests included. The patch appears to be a documentation patch that doesn't require tests."
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
JIRA_COMMENT="$JIRA_COMMENT
|
||||
|
||||
-1 tests included. The patch doesn't appear to include any new or modified tests.
|
||||
Please justify why no new tests are needed for this patch.
|
||||
Also please list what manual steps were performed to verify this patch."
|
||||
return 1
|
||||
fi
|
||||
JIRA_COMMENT="$JIRA_COMMENT
|
||||
|
||||
+1 tests included. The patch appears to include $testReferences new or modified tests."
|
||||
return 0
|
||||
}
|
||||
|
||||
cleanUpXml () {
|
||||
cd $BASEDIR/conf
|
||||
for file in `ls *.xml.template`
|
||||
do
|
||||
rm -f `basename $file .template`
|
||||
done
|
||||
cd $BASEDIR
|
||||
}
|
||||
|
||||
###############################################################################
|
||||
### Attempt to apply the patch
|
||||
applyPatch () {
|
||||
echo ""
|
||||
echo ""
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo " Applying patch."
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo ""
|
||||
echo ""
|
||||
export PATCH
|
||||
$bindir/smart-apply-patch.sh $PATCH_DIR/patch
|
||||
if [[ $? != 0 ]] ; then
|
||||
echo "PATCH APPLICATION FAILED"
|
||||
JIRA_COMMENT="$JIRA_COMMENT
|
||||
|
||||
-1 patch. The patch command could not apply the patch."
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
###############################################################################
|
||||
### Check there are no javadoc warnings
|
||||
checkJavadocWarnings () {
|
||||
echo ""
|
||||
echo ""
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo " Determining number of patched javadoc warnings."
|
||||
echo "======================================================================"
|
||||
echo "======================================================================"
|
||||
echo ""
|
||||
echo ""
|
||||
echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt"
|
||||
(cd root; mvn install)
|
||||
(cd doclet; mvn install)
|
||||
#$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt
|
||||
$MAVEN_HOME/bin/mvn clean compile javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1
|
||||
javadocWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavadocWarnings.txt | awk '/Javadoc Warnings/,EOF' | $GREP -v 'Javadoc Warnings' | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
|
||||
echo ""
|
||||
echo ""
|
||||
echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
|
||||
|
||||
### if current warnings greater than OK_JAVADOC_WARNINGS
|
||||
if [[ $javadocWarnings > $OK_JAVADOC_WARNINGS ]] ; then
|
||||
JIRA_COMMENT="$JIRA_COMMENT
|
||||
|
||||
-1 javadoc. The javadoc tool appears to have generated `expr $(($javadocWarnings-$OK_JAVADOC_WARNINGS))` warning messages."
|
||||
return 1
|
||||
fi
|
||||
JIRA_COMMENT="$JIRA_COMMENT
|
||||
|
||||
+1 javadoc. The javadoc tool did not generate any warning messages."
|
||||
return 0
|
||||
}
|
||||
|
||||
###############################################################################
|
||||
### Check there are no changes in the number of Javac warnings
|
||||
# Compile the patched tree with Maven and compare the number of javac
# [WARNING] lines against the pre-patch baseline recorded in
# $PATCH_DIR/trunkJavacWarnings.txt.
# Globals (read):  MAVEN_HOME, PROJECT_NAME, PATCH_DIR, GREP
# Globals (write): JIRA_COMMENT
# Returns: 0 when the patch does not add javac warnings, 1 otherwise.
checkJavacWarnings () {
  echo ""
  echo ""
  echo "======================================================================"
  echo "======================================================================"
  echo "    Determining number of patched javac warnings."
  echo "======================================================================"
  echo "======================================================================"
  echo ""
  echo ""
  # Legacy Ant invocation, kept for reference after the mavenization:
  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1
  # Compile the patched tree; all output (including [WARNING] lines) is
  # captured for counting below.
  $MAVEN_HOME/bin/mvn clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1
  if [[ $? != 0 ]] ; then
    # FIX: the old message referred to the retired "tar" Ant target; this is
    # a Maven build now.
    JIRA_COMMENT="$JIRA_COMMENT

    -1 javac. The patch appears to cause the Maven build to fail."
    return 1
  fi
  ### Compare trunk and patch javac warning numbers
  if [[ -f $PATCH_DIR/patchJavacWarnings.txt ]] ; then
    # The awk pipeline prints 0 when grep matches nothing or the baseline
    # file is missing (unlike a bare `grep -c` on a missing file), so it is
    # kept as-is.
    trunkJavacWarnings=$($GREP '\[WARNING\]' $PATCH_DIR/trunkJavacWarnings.txt | awk 'BEGIN {total = 0} {total += 1} END {print total}')
    patchJavacWarnings=$($GREP '\[WARNING\]' $PATCH_DIR/patchJavacWarnings.txt | awk 'BEGIN {total = 0} {total += 1} END {print total}')
    echo "There appear to be $trunkJavacWarnings javac compiler warnings before the patch and $patchJavacWarnings javac compiler warnings after applying the patch."
    # Only compare when both counts are non-empty.
    if [[ $patchJavacWarnings != "" && $trunkJavacWarnings != "" ]] ; then
      if [[ $patchJavacWarnings -gt $trunkJavacWarnings ]] ; then
        JIRA_COMMENT="$JIRA_COMMENT

    -1 javac. The applied patch generated $patchJavacWarnings javac compiler warnings (more than the trunk's current $trunkJavacWarnings warnings)."
        return 1
      fi
    fi
  fi
  JIRA_COMMENT="$JIRA_COMMENT

    +1 javac. The applied patch does not increase the total number of javac compiler warnings."
  return 0
}
|
||||
|
||||
###############################################################################
|
||||
### Check there are no changes in the number of release audit (RAT) warnings
|
||||
# Run the Apache RAT license audit over the patched tree and compare the
# count of unlicensed files against the baseline in OK_RELEASEAUDIT_WARNINGS.
# Globals (read):  MAVEN_HOME, PROJECT_NAME, PATCH_DIR, GREP, BUILD_URL,
#                  OK_RELEASEAUDIT_WARNINGS
# Globals (write): JIRA_COMMENT, JIRA_COMMENT_FOOTER
# Returns: 0 when the patch does not add release audit warnings, 1 otherwise.
checkReleaseAuditWarnings () {
  echo ""
  echo ""
  echo "======================================================================"
  echo "======================================================================"
  echo "    Determining number of patched release audit warnings."
  echo "======================================================================"
  echo "======================================================================"
  echo ""
  echo ""
  # Legacy Ant invocation, kept for reference after the mavenization:
  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1"
  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1
  # Run the RAT check; each module writes its report to a rat.txt under its
  # build tree, so the find below concatenates them all into one report.
  $MAVEN_HOME/bin/mvn apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1
  # NOTE(review): if no rat.txt exists, `xargs cat` runs cat with no file
  # arguments and reads the (empty) pipe, producing an empty report; the
  # grep -c below then counts 0, which is the intended "no warnings" result.
  find . -name rat.txt | xargs cat > $PATCH_DIR/patchReleaseAuditWarnings.txt

  ### Compare trunk and patch release audit warning numbers
  if [[ -f $PATCH_DIR/patchReleaseAuditWarnings.txt ]] ; then
    # RAT marks files lacking an Apache license header with a leading
    # "!?????"; both '!' and '?' are literal characters in a basic regex.
    patchReleaseAuditWarnings=`$GREP -c '\!?????' $PATCH_DIR/patchReleaseAuditWarnings.txt`
    echo ""
    echo ""
    echo "There appear to be $OK_RELEASEAUDIT_WARNINGS release audit warnings before the patch and $patchReleaseAuditWarnings release audit warnings after applying the patch."
    # Only compare when both the new count and the configured baseline are
    # non-empty strings.
    if [[ $patchReleaseAuditWarnings != "" && $OK_RELEASEAUDIT_WARNINGS != "" ]] ; then
      if [[ $patchReleaseAuditWarnings -gt $OK_RELEASEAUDIT_WARNINGS ]] ; then
        JIRA_COMMENT="$JIRA_COMMENT

    -1 release audit. The applied patch generated $patchReleaseAuditWarnings release audit warnings (more than the trunk's current $OK_RELEASEAUDIT_WARNINGS warnings)."
        # Publish the offending report lines so the JIRA footer can link to them.
        $GREP '\!?????' $PATCH_DIR/patchReleaseAuditWarnings.txt > $PATCH_DIR/patchReleaseAuditProblems.txt
        echo "Lines that start with ????? in the release audit report indicate files that do not have an Apache license header." >> $PATCH_DIR/patchReleaseAuditProblems.txt
        JIRA_COMMENT_FOOTER="Release audit warnings: $BUILD_URL/artifact/trunk/patchprocess/patchReleaseAuditProblems.txt
$JIRA_COMMENT_FOOTER"
        return 1
      fi
    fi
  fi
  JIRA_COMMENT="$JIRA_COMMENT

    +1 release audit. The applied patch does not increase the total number of release audit warnings."
  return 0
}
|
||||
|
||||
###############################################################################
|
||||
### Check there are no changes in the number of Checkstyle warnings
|
||||
# Placeholder checkstyle gate: runs the checkstyle Maven goal and publishes a
# link to the report, but does not yet evaluate the results (always passes).
# Globals (read):  MAVEN_HOME, PROJECT_NAME, BUILD_URL
# Globals (write): JIRA_COMMENT_FOOTER
# Returns: always 0 until the TODO below is implemented.
checkStyle () {
  local bar="======================================================================"
  echo ""
  echo ""
  echo "$bar"
  echo "$bar"
  echo "    Determining number of patched checkstyle warnings."
  echo "$bar"
  echo "$bar"
  echo ""
  echo ""
  echo "THIS IS NOT IMPLEMENTED YET"
  echo ""
  echo ""
  # Legacy Ant invocation, kept for reference after the mavenization:
  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle"
  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle
  $MAVEN_HOME/bin/mvn compile checkstyle:checkstyle -D${PROJECT_NAME}PatchProcess

  JIRA_COMMENT_FOOTER="Checkstyle results: $BUILD_URL/artifact/trunk/build/test/checkstyle-errors.html
$JIRA_COMMENT_FOOTER"
  ### TODO: calculate actual patchStyleErrors
  # patchStyleErrors=0
  # if [[ $patchStyleErrors != 0 ]] ; then
  #   JIRA_COMMENT="$JIRA_COMMENT
  #
  #   -1 checkstyle. The patch generated $patchStyleErrors code style errors."
  #   return 1
  # fi
  # JIRA_COMMENT="$JIRA_COMMENT
  #
  #   +1 checkstyle. The patch generated 0 code style errors."
  return 0
}
|
||||
|
||||
###############################################################################
|
||||
### Check there are no changes in the number of Findbugs warnings
|
||||
# Run Findbugs over the patched tree and compare the warning count against
# the configured baseline OK_FINDBUGS_WARNINGS.
# Globals (read):  FINDBUGS_HOME, MAVEN_HOME, PROJECT_NAME, BASEDIR,
#                  PATCH_DIR, BUILD_URL, OK_FINDBUGS_WARNINGS
# Globals (write): JIRA_COMMENT, JIRA_COMMENT_FOOTER, findbugs_version,
#                  findbugsWarnings
# Returns: 0 when no new Findbugs warnings are introduced, 1 otherwise.
checkFindbugsWarnings () {
  # Captured once so every JIRA message reports the exact Findbugs version.
  findbugs_version=`${FINDBUGS_HOME}/bin/findbugs -version`
  echo ""
  echo ""
  echo "======================================================================"
  echo "======================================================================"
  echo "    Determining number of patched Findbugs warnings."
  echo "======================================================================"
  echo "======================================================================"
  echo ""
  echo ""
  # Legacy Ant invocation, kept for reference after the mavenization:
  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=$FINDBUGS_HOME -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs"
  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs
  # -X enables Maven debug output -- presumably left in for build diagnosis;
  # confirm before removing.
  $MAVEN_HOME/bin/mvn clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess -X

  if [ $? != 0 ] ; then
    JIRA_COMMENT="$JIRA_COMMENT

    -1 findbugs. The patch appears to cause Findbugs (version ${findbugs_version}) to fail."
    return 1
  fi
  JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/target/newPatchFindbugsWarnings.html
$JIRA_COMMENT_FOOTER"

  # Copy the raw report, then stamp every bug with a fixed timestamp so
  # filterBugs below can select warnings "new" relative to that instant.
  cp $BASEDIR/hadoop-common/target/findbugsXml.xml $PATCH_DIR/patchFindbugsWarnings.xml
  $FINDBUGS_HOME/bin/setBugDatabaseInfo -timestamp "01/01/2000" \
    $PATCH_DIR/patchFindbugsWarnings.xml \
    $PATCH_DIR/patchFindbugsWarnings.xml
  # filterBugs writes the filtered database and prints a count; awk extracts
  # the first field as the warning count.
  findbugsWarnings=`$FINDBUGS_HOME/bin/filterBugs -first "01/01/2000" $PATCH_DIR/patchFindbugsWarnings.xml \
    $PATCH_DIR/newPatchFindbugsWarnings.xml | /usr/bin/awk '{print $1}'`
  # Human-readable HTML report linked from JIRA_COMMENT_FOOTER above.
  $FINDBUGS_HOME/bin/convertXmlToText -html \
    $PATCH_DIR/newPatchFindbugsWarnings.xml \
    $PATCH_DIR/newPatchFindbugsWarnings.html

  ### if current warnings greater than OK_FINDBUGS_WARNINGS
  # NOTE(review): '>' inside [[ ]] compares strings lexicographically, not
  # numerically (e.g. "9" > "10" is true); -gt would be the numeric form --
  # confirm intended behavior before changing.
  if [[ $findbugsWarnings > $OK_FINDBUGS_WARNINGS ]] ; then
    JIRA_COMMENT="$JIRA_COMMENT

    -1 findbugs. The patch appears to introduce `expr $(($findbugsWarnings-$OK_FINDBUGS_WARNINGS))` new Findbugs (version ${findbugs_version}) warnings."
    return 1
  fi
  JIRA_COMMENT="$JIRA_COMMENT

    +1 findbugs. The patch does not introduce any new Findbugs (version ${findbugs_version}) warnings."
  return 0
}
|
||||
|
||||
###############################################################################
|
||||
### Run the test-core target
|
||||
# Run the core unit test suite via Maven; on failure, collect the names of
# the failed tests from the surefire XML reports into the JIRA comment.
# Globals (read):  PS, GREP, MAVEN_HOME, WORKSPACE, defect
# Globals (write): JIRA_COMMENT, PreTestTarget, failed_tests
# Returns: 0 when tests pass, 1 when the build or any test fails.
runCoreTests () {
  echo ""
  echo ""
  echo "======================================================================"
  echo "======================================================================"
  echo "    Running core tests."
  echo "======================================================================"
  echo "======================================================================"
  echo ""
  echo ""

  ### Kill any rogue build processes from the last attempt
  $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
  # Only used by the retired Ant path; kept so the MAPREDUCE special case is
  # preserved until the Maven build covers it.
  PreTestTarget=""
  if [[ $defect == MAPREDUCE-* ]] ; then
    PreTestTarget="create-c++-configure"
  fi

  # Legacy Ant invocation, kept for reference after the mavenization:
  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME $PreTestTarget test-core"
  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME $PreTestTarget test-core
  $MAVEN_HOME/bin/mvn clean test -Pnative -DHadoopPatchProcess
  if [[ $? != 0 ]] ; then
    ### Find and format names of failed tests
    # FIX: the sed pattern previously required the literal path
    # "target/surefire-reports/TEST-", which never matched the
    # "target/hadoop-common/surefire-reports" layout globbed above, so test
    # names kept their full path. Match any .../surefire-reports parent.
    failed_tests=$(grep -l -E "<failure|<error" $WORKSPACE/trunk/target/hadoop-common/surefire-reports/*.xml | sed -e "s|.*/surefire-reports/TEST-|                  |g" -e "s|\.xml||g")
    JIRA_COMMENT="$JIRA_COMMENT

    -1 core tests. The patch failed these core unit tests:
$failed_tests"
    return 1
  fi
  JIRA_COMMENT="$JIRA_COMMENT

    +1 core tests. The patch passed core unit tests."
  return 0
}
|
||||
|
||||
###############################################################################
|
||||
### Run the test-contrib target
|
||||
# Run the contrib test suite. Projects without a 'test-contrib' target in
# build.xml are skipped; the actual test run is currently a no-op placeholder
# left over from the Ant-to-Maven migration.
# Globals (read):  GREP, PS
# Globals (write): JIRA_COMMENT
# Returns: 0 on pass/skip, 1 on failure.
runContribTests () {
  local bar="======================================================================"
  echo ""
  echo ""
  echo "$bar"
  echo "$bar"
  echo "    Running contrib tests."
  echo "$bar"
  echo "$bar"
  echo ""
  echo ""

  # Skip entirely when build.xml defines no contrib test target.
  local contrib_hits
  contrib_hits=$($GREP -c 'test-contrib' build.xml)
  if [[ $contrib_hits == 0 ]] ; then
    echo "No contrib tests in this project."
    return 0
  fi

  ### Kill any rogue build processes from the last attempt
  $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null

  # Legacy Ant invocation, kept for reference; no Maven equivalent yet.
  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib"
  #$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib
  echo "NOP"
  if [[ $? != 0 ]] ; then
    JIRA_COMMENT="$JIRA_COMMENT

    -1 contrib tests. The patch failed contrib unit tests."
    return 1
  fi
  JIRA_COMMENT="$JIRA_COMMENT

    +1 contrib tests. The patch passed contrib unit tests."
  return 0
}
|
||||
|
||||
###############################################################################
|
||||
### Run the inject-system-faults target
|
||||
# Verify the fault-injection (system test) framework still compiles. The
# actual build step is a no-op placeholder left over from the Ant-to-Maven
# migration, so this currently always reports +1.
# Globals (read):  PS, GREP
# Globals (write): JIRA_COMMENT
# Returns: 0 on success, 1 on failure.
checkInjectSystemFaults () {
  local bar="======================================================================"
  echo ""
  echo ""
  echo "$bar"
  echo "$bar"
  echo "    Checking the integrity of system test framework code."
  echo "$bar"
  echo "$bar"
  echo ""
  echo ""

  ### Kill any rogue build processes from the last attempt
  $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null

  # Legacy Ant invocation, kept for reference; no Maven equivalent yet.
  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults"
  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults
  echo "NOP"
  if [[ $? != 0 ]] ; then
    JIRA_COMMENT="$JIRA_COMMENT

    -1 system test framework. The patch failed system test framework compile."
    return 1
  fi
  JIRA_COMMENT="$JIRA_COMMENT

    +1 system test framework. The patch passed system test framework compile."
  return 0
}
|
||||
|
||||
###############################################################################
|
||||
### Submit a comment to the defect's Jira
|
||||
# Assemble the overall +1/-1 verdict from JIRA_COMMENT, print it to the
# console, and (on Hudson only) post it to the defect's JIRA issue.
# Arguments: $1 - overall result code (0 = success)
# Globals (read):  HUDSON, JIRA_COMMENT, JIRACLI, JIRA_PASSWD, defect
# Globals (write): JIRA_COMMENT_FOOTER (cleared for developer runs), comment,
#                  USER (exported as "hudson" on Hudson)
submitJiraComment () {
  local result=$1
  local verdict

  ### Do not output the value of JIRA_COMMENT_FOOTER when run by a developer
  if [[ $HUDSON == "false" ]] ; then
    JIRA_COMMENT_FOOTER=""
  fi

  if [[ $result == 0 ]] ; then
    verdict="+1"
  else
    verdict="-1"
  fi
  comment="$verdict overall. $JIRA_COMMENT

$JIRA_COMMENT_FOOTER"

  ### Output the test result to the console
  echo "



$comment"

  if [[ $HUDSON == "true" ]] ; then
    local bar="======================================================================"
    echo ""
    echo ""
    echo "$bar"
    echo "$bar"
    echo "    Adding comment to Jira."
    echo "$bar"
    echo "$bar"
    echo ""
    echo ""
    ### Update Jira with a comment
    export USER=hudson
    $JIRACLI -s https://issues.apache.org/jira -a addcomment -u hadoopqa -p $JIRA_PASSWD --comment "$comment" --issue $defect
    $JIRACLI -s https://issues.apache.org/jira -a logout -u hadoopqa -p $JIRA_PASSWD
  fi
}
|
||||
|
||||
###############################################################################
|
||||
### Cleanup files
|
||||
# Final exit point for the script: on Hudson, archive the patch working
# directory under BASEDIR, print the closing banner, and exit with the
# overall result code.
# Arguments: $1 - exit status to propagate
# Globals (read): HUDSON, PATCH_DIR, BASEDIR
cleanupAndExit () {
  local result=$1
  local bar="======================================================================"

  # Archive the patch working directory, but only when running on Hudson.
  if [[ $HUDSON == "true" ]] && [ -e "$PATCH_DIR" ] ; then
    mv $PATCH_DIR $BASEDIR
  fi

  echo ""
  echo ""
  echo "$bar"
  echo "$bar"
  echo "    Finished build."
  echo "$bar"
  echo "$bar"
  echo ""
  echo ""
  exit $result
}
|
||||
|
||||
###############################################################################
|
||||
###############################################################################
|
||||
###############################################################################
|
||||
|
||||
JIRA_COMMENT=""
|
||||
JIRA_COMMENT_FOOTER="Console output: $BUILD_URL/console
|
||||
|
||||
This message is automatically generated."
|
||||
|
||||
### Check if arguments to the script have been specified properly or not
|
||||
parseArgs $@
|
||||
cd $BASEDIR
|
||||
|
||||
checkout
|
||||
RESULT=$?
|
||||
if [[ $HUDSON == "true" ]] ; then
|
||||
if [[ $RESULT != 0 ]] ; then
|
||||
exit 100
|
||||
fi
|
||||
fi
|
||||
setup
|
||||
checkAuthor
|
||||
RESULT=$?
|
||||
|
||||
if [[ $HUDSON == "true" ]] ; then
|
||||
cleanUpXml
|
||||
fi
|
||||
checkTests
|
||||
(( RESULT = RESULT + $? ))
|
||||
applyPatch
|
||||
if [[ $? != 0 ]] ; then
|
||||
submitJiraComment 1
|
||||
cleanupAndExit 1
|
||||
fi
|
||||
checkJavadocWarnings
|
||||
(( RESULT = RESULT + $? ))
|
||||
checkJavacWarnings
|
||||
(( RESULT = RESULT + $? ))
|
||||
### Checkstyle not implemented yet
|
||||
#checkStyle
|
||||
#(( RESULT = RESULT + $? ))
|
||||
checkFindbugsWarnings
|
||||
(( RESULT = RESULT + $? ))
|
||||
checkReleaseAuditWarnings
|
||||
(( RESULT = RESULT + $? ))
|
||||
### Do not call these when run by a developer
|
||||
if [[ $HUDSON == "true" ]] ; then
|
||||
runCoreTests
|
||||
(( RESULT = RESULT + $? ))
|
||||
runContribTests
|
||||
(( RESULT = RESULT + $? ))
|
||||
fi
|
||||
checkInjectSystemFaults
|
||||
(( RESULT = RESULT + $? ))
|
||||
JIRA_COMMENT_FOOTER="Test results: $BUILD_URL/testReport/
|
||||
$JIRA_COMMENT_FOOTER"
|
||||
|
||||
submitJiraComment $RESULT
|
||||
cleanupAndExit $RESULT
|
38
hadoop-annotations/pom.xml
Normal file
@ -0,0 +1,38 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License. See accompanying LICENSE file.
|
||||
-->
|
||||
<project>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-project</artifactId>
|
||||
<version>0.23.0-SNAPSHOT</version>
|
||||
<relativePath>../hadoop-project</relativePath>
|
||||
</parent>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-annotations</artifactId>
|
||||
<version>0.23.0-SNAPSHOT</version>
|
||||
<description>Apache Hadoop Annotations</description>
|
||||
<name>Apache Hadoop Annotations</name>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>jdiff</groupId>
|
||||
<artifactId>jdiff</artifactId>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
</project>
|
105
hadoop-assemblies/pom.xml
Normal file
@ -0,0 +1,105 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<project>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-project</artifactId>
|
||||
<version>0.23.0-SNAPSHOT</version>
|
||||
<relativePath>../hadoop-project</relativePath>
|
||||
</parent>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-assemblies</artifactId>
|
||||
<version>0.23.0-SNAPSHOT</version>
|
||||
<name>Apache Hadoop Assemblies</name>
|
||||
<description>Apache Hadoop Assemblies</description>
|
||||
|
||||
<properties>
|
||||
<failIfNoTests>false</failIfNoTests>
|
||||
</properties>
|
||||
|
||||
<build>
|
||||
<pluginManagement>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-enforcer-plugin</artifactId>
|
||||
<version>1.0</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<version>2.2-beta-3</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.rat</groupId>
|
||||
<artifactId>apache-rat-plugin</artifactId>
|
||||
<version>0.7</version>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</pluginManagement>
|
||||
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-enforcer-plugin</artifactId>
|
||||
<inherited>false</inherited>
|
||||
<configuration>
|
||||
<rules>
|
||||
<requireMavenVersion>
|
||||
<version>[3.0.0,)</version>
|
||||
</requireMavenVersion>
|
||||
<requireJavaVersion>
|
||||
<version>1.6</version>
|
||||
</requireJavaVersion>
|
||||
<requireOS>
|
||||
<family>unix</family>
|
||||
</requireOS>
|
||||
</rules>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>clean</id>
|
||||
<goals>
|
||||
<goal>enforce</goal>
|
||||
</goals>
|
||||
<phase>pre-clean</phase>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>default</id>
|
||||
<goals>
|
||||
<goal>enforce</goal>
|
||||
</goals>
|
||||
<phase>validate</phase>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>site</id>
|
||||
<goals>
|
||||
<goal>enforce</goal>
|
||||
</goals>
|
||||
<phase>pre-site</phase>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.rat</groupId>
|
||||
<artifactId>apache-rat-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
@ -0,0 +1,113 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<assembly>
|
||||
<id>hadoop-bintar</id>
|
||||
<formats>
|
||||
<format>dir</format>
|
||||
</formats>
|
||||
<includeBaseDirectory>false</includeBaseDirectory>
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/bin</directory>
|
||||
<outputDirectory>/bin</outputDirectory>
|
||||
<includes>
|
||||
<include>hadoop</include>
|
||||
</includes>
|
||||
<fileMode>0755</fileMode>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/conf</directory>
|
||||
<outputDirectory>/etc/hadoop</outputDirectory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/bin</directory>
|
||||
<outputDirectory>/libexec</outputDirectory>
|
||||
<includes>
|
||||
<include>hadoop-config.sh</include>
|
||||
</includes>
|
||||
<fileMode>0755</fileMode>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/bin</directory>
|
||||
<outputDirectory>/sbin</outputDirectory>
|
||||
<includes>
|
||||
<include>*.sh</include>
|
||||
</includes>
|
||||
<excludes>
|
||||
<exclude>hadoop-config.sh</exclude>
|
||||
</excludes>
|
||||
<fileMode>0755</fileMode>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/packages</directory>
|
||||
<outputDirectory>/sbin</outputDirectory>
|
||||
<includes>
|
||||
<include>*.sh</include>
|
||||
</includes>
|
||||
<fileMode>0755</fileMode>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}</directory>
|
||||
<outputDirectory>/share/doc/hadoop/${hadoop.component}</outputDirectory>
|
||||
<includes>
|
||||
<include>*.txt</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/webapps</directory>
|
||||
<outputDirectory>/share/hadoop/${hadoop.component}/webapps</outputDirectory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/conf</directory>
|
||||
<outputDirectory>/share/hadoop/${hadoop.component}/templates</outputDirectory>
|
||||
<includes>
|
||||
<include>*-site.xml</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${project.build.directory}</directory>
|
||||
<outputDirectory>/share/hadoop/${hadoop.component}</outputDirectory>
|
||||
<includes>
|
||||
<include>${project.artifactId}-${project.version}.jar</include>
|
||||
<include>${project.artifactId}-${project.version}-tests.jar</include>
|
||||
<include>${project.artifactId}-${project.version}-sources.jar</include>
|
||||
<include>${project.artifactId}-${project.version}-test-sources.jar</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/dev-support/jdiff</directory>
|
||||
<outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${project.build.directory}/site/jdiff/xml</directory>
|
||||
<outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
<dependencySets>
|
||||
<dependencySet>
|
||||
<outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
|
||||
<unpack>false</unpack>
|
||||
<scope>compile</scope>
|
||||
<useProjectArtifact>false</useProjectArtifact>
|
||||
<excludes>
|
||||
<exclude>org.apache.ant:*:jar</exclude>
|
||||
<exclude>org.apache.hadoop:hadoop-*:jar</exclude>
|
||||
<exclude>jdiff:jdiff:jar</exclude>
|
||||
</excludes>
|
||||
</dependencySet>
|
||||
</dependencySets>
|
||||
</assembly>
|
@ -0,0 +1,37 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
|
||||
<id>hadoop-src</id>
|
||||
<formats>
|
||||
<format>dir</format>
|
||||
</formats>
|
||||
<includeBaseDirectory>false</includeBaseDirectory>
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<directory>${project.basedir}</directory>
|
||||
<outputDirectory>src/</outputDirectory>
|
||||
<useDefaultExcludes>true</useDefaultExcludes>
|
||||
<excludes>
|
||||
<exclude>**/*.log</exclude>
|
||||
<exclude>**/build/**</exclude>
|
||||
<exclude>**/target/**</exclude>
|
||||
</excludes>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
</assembly>
|
@ -0,0 +1,85 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<assembly>
|
||||
<id>hadoop-tar</id>
|
||||
<formats>
|
||||
<format>dir</format>
|
||||
</formats>
|
||||
<includeBaseDirectory>false</includeBaseDirectory>
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<directory>${basedir}</directory>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<includes>
|
||||
<include>*.txt</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/bin</directory>
|
||||
<outputDirectory>/bin</outputDirectory>
|
||||
<includes>
|
||||
<include>*</include>
|
||||
</includes>
|
||||
<fileMode>0755</fileMode>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/bin</directory>
|
||||
<outputDirectory>/libexec</outputDirectory>
|
||||
<includes>
|
||||
<include>hadoop-config.sh</include>
|
||||
</includes>
|
||||
<fileMode>0755</fileMode>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/conf</directory>
|
||||
<outputDirectory>/conf</outputDirectory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${basedir}/src/main/webapps</directory>
|
||||
<outputDirectory>/webapps</outputDirectory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${project.build.directory}/site</directory>
|
||||
<outputDirectory>/docs</outputDirectory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${project.build.directory}</directory>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<includes>
|
||||
<include>${project.artifactId}-${project.version}.jar</include>
|
||||
<include>${project.artifactId}-${project.version}-tests.jar</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>${project.build.directory}/src</directory>
|
||||
<outputDirectory>/src</outputDirectory>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
<dependencySets>
|
||||
<dependencySet>
|
||||
<outputDirectory>/lib</outputDirectory>
|
||||
<unpack>false</unpack>
|
||||
<scope>compile</scope>
|
||||
<useProjectArtifact>false</useProjectArtifact>
|
||||
<excludes>
|
||||
<exclude>org.apache.ant:*:jar</exclude>
|
||||
<exclude>org.apache.hadoop:hadoop-*:jar</exclude>
|
||||
<exclude>jdiff:jdiff:jar</exclude>
|
||||
</excludes>
|
||||
</dependencySet>
|
||||
</dependencySets>
|
||||
</assembly>
|
61
hadoop-common/BUILDING.txt
Normal file
@ -0,0 +1,61 @@
|
||||
----------------------------------------------------------------------------------
|
||||
Requirements:
|
||||
|
||||
* Unix System
|
||||
* JDK 1.6
|
||||
* Maven 3.0
|
||||
* Forrest 0.8 (if generating docs)
|
||||
* Findbugs 1.3.9 (if running findbugs)
|
||||
* Autotools (if compiling native code)
|
||||
* Internet connection for first build (to fetch all Maven and Hadoop dependencies)
|
||||
|
||||
----------------------------------------------------------------------------------
|
||||
Maven modules:
|
||||
|
||||
hadoop (Main Hadoop project)
|
||||
- hadoop-project (Parent POM for all Hadoop Maven modules. )
|
||||
(All plugins & dependencies versions are defined here.)
|
||||
- hadoop-annotations (Generates the Hadoop doclet used to generated the Javadocs)
|
||||
- hadoop-common (Hadoop common)
|
||||
|
||||
----------------------------------------------------------------------------------
|
||||
Where to run Maven from?
|
||||
|
||||
It can be run from any module. The only catch is that if not run from utrunk
|
||||
all modules that are not part of the build run must be installed in the local
|
||||
Maven cache or available in a Maven repository.
|
||||
|
||||
----------------------------------------------------------------------------------
|
||||
Maven build goals:
|
||||
|
||||
* Clean : mvn clean
|
||||
* Compile : mvn compile [-Pnative]
|
||||
* Run tests : mvn test [-Pnative]
|
||||
* Create JAR : mvn package
|
||||
* Run findbugs : mvn compile findbugs:findbugs
|
||||
* Run checkstyle : mvn compile checkstyle:checkstyle
|
||||
* Install JAR in M2 cache : mvn install
|
||||
* Deploy JAR to Maven repo : mvn deploy
|
||||
* Run clover : mvn test -Pclover [-DcloverLicenseLocation=${user.name}/.clover.license]
|
||||
* Run Rat : mvn apache-rat:check
|
||||
* Build javadocs : mvn javadoc:javadoc
|
||||
* Build TAR : mvn package [-Ptar][-Pbintar][-Pdocs][-Psrc][-Pnative]
|
||||
|
||||
Build options:
|
||||
|
||||
* Use -Pnative to compile/bundle native code
|
||||
* Use -Dsnappy.prefix=(/usr/local) & -Dbundle.snappy=(false) to compile
|
||||
Snappy JNI bindings and to bundle Snappy SO files
|
||||
* Use -Pdocs to generate & bundle the documentation in the TAR (using -Ptar)
|
||||
* Use -Psrc to bundle the source in the TAR (using -Ptar)
|
||||
|
||||
Tests options:
|
||||
|
||||
* Use -DskipTests to skip tests when running the following Maven goals:
|
||||
'package', 'install', 'deploy' or 'verify'
|
||||
* -Dtest=<TESTCLASSNAME>,....
|
||||
* -Dtest.exclude=<TESTCLASSNAME>
|
||||
* -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
|
||||
|
||||
|
||||
----------------------------------------------------------------------------------
|
@ -296,7 +296,19 @@ Trunk (unreleased changes)
|
||||
|
||||
HADOOP-7491. hadoop command should respect HADOOP_OPTS when given
|
||||
a class name. (eli)
|
||||
|
||||
|
||||
HADOOP-7178. Add a parameter, useRawLocalFileSystem, to copyToLocalFile(..)
|
||||
in FileSystem. (Uma Maheswara Rao G via szetszwo)
|
||||
|
||||
HADOOP-6671. Use maven for hadoop common builds. (Alejandro Abdelnur
|
||||
via tomwhite)
|
||||
|
||||
HADOOP-7502. Make generated sources IDE friendly.
|
||||
(Alejandro Abdelnur via llu)
|
||||
|
||||
HADOOP-7501. Publish Hadoop Common artifacts (post HADOOP-6671) to Apache
|
||||
SNAPSHOTs repo. (Alejandro Abdelnur via tomwhite)
|
||||
|
||||
OPTIMIZATIONS
|
||||
|
||||
HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
|
||||
@ -437,6 +449,9 @@ Trunk (unreleased changes)
|
||||
HADOOP-7468 hadoop-core JAR contains a log4j.properties file.
|
||||
(Jolly Chen)
|
||||
|
||||
HADOOP-7508. Compiled nativelib is in wrong directory and it is not picked
|
||||
up by surefire setup. (Alejandro Abdelnur via tomwhite)
|
||||
|
||||
Release 0.22.0 - Unreleased
|
||||
|
||||
INCOMPATIBLE CHANGES
|
@ -51,7 +51,7 @@
|
||||
|
||||
<!-- Checks that a package.html file exists for each package. -->
|
||||
<!-- See http://checkstyle.sf.net/config_javadoc.html#PackageHtml -->
|
||||
<module name="PackageHtml"/>
|
||||
<module name="JavadocPackage"/>
|
||||
|
||||
<!-- Checks whether files end with a new line. -->
|
||||
<!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
|
||||
@ -61,6 +61,8 @@
|
||||
<!-- See http://checkstyle.sf.net/config_misc.html#Translation -->
|
||||
<module name="Translation"/>
|
||||
|
||||
<module name="FileLength"/>
|
||||
<module name="FileTabCharacter"/>
|
||||
|
||||
<module name="TreeWalker">
|
||||
|
||||
@ -112,7 +114,6 @@
|
||||
|
||||
<!-- Checks for Size Violations. -->
|
||||
<!-- See http://checkstyle.sf.net/config_sizes.html -->
|
||||
<module name="FileLength"/>
|
||||
<module name="LineLength"/>
|
||||
<module name="MethodLength"/>
|
||||
<module name="ParameterNumber"/>
|
||||
@ -126,7 +127,6 @@
|
||||
<module name="NoWhitespaceBefore"/>
|
||||
<module name="ParenPad"/>
|
||||
<module name="TypecastParenPad"/>
|
||||
<module name="TabCharacter"/>
|
||||
<module name="WhitespaceAfter">
|
||||
<property name="tokens" value="COMMA, SEMI"/>
|
||||
</module>
|
@ -43,14 +43,20 @@ else
|
||||
branch="Unknown"
|
||||
url="file://$cwd"
|
||||
fi
|
||||
srcChecksum=`find src -name '*.java' | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
|
||||
|
||||
mkdir -p $build_dir/src/org/apache/hadoop
|
||||
which md5sum > /dev/null
|
||||
if [ "$?" = "0" ] ; then
|
||||
srcChecksum=`find src/main/java -name '*.java' | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
|
||||
else
|
||||
srcChecksum="Not Available"
|
||||
fi
|
||||
|
||||
mkdir -p $build_dir/org/apache/hadoop
|
||||
cat << EOF | \
|
||||
sed -e "s/VERSION/$version/" -e "s/USER/$user/" -e "s/DATE/$date/" \
|
||||
-e "s|URL|$url|" -e "s/REV/$revision/" \
|
||||
-e "s|BRANCH|$branch|" -e "s/SRCCHECKSUM/$srcChecksum/" \
|
||||
> $build_dir/src/org/apache/hadoop/package-info.java
|
||||
> $build_dir/org/apache/hadoop/package-info.java
|
||||
/*
|
||||
* Generated by src/saveVersion.sh
|
||||
*/
|
1019
hadoop-common/pom.xml
Normal file
Before Width: | Height: | Size: 15 KiB After Width: | Height: | Size: 15 KiB |
Before Width: | Height: | Size: 5.7 KiB After Width: | Height: | Size: 5.7 KiB |
Before Width: | Height: | Size: 6.5 KiB After Width: | Height: | Size: 6.5 KiB |
Before Width: | Height: | Size: 766 B After Width: | Height: | Size: 766 B |
Before Width: | Height: | Size: 125 KiB After Width: | Height: | Size: 125 KiB |
Before Width: | Height: | Size: 9.2 KiB After Width: | Height: | Size: 9.2 KiB |
Before Width: | Height: | Size: 17 KiB After Width: | Height: | Size: 17 KiB |
Before Width: | Height: | Size: 40 KiB After Width: | Height: | Size: 40 KiB |
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 16 KiB |
Before Width: | Height: | Size: 29 KiB After Width: | Height: | Size: 29 KiB |