HADOOP-14671. Upgrade Apache Yetus to 0.8.0
Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
Parent: 8e54da1511
Commit: 4b8c2b1c1c
@@ -617,8 +617,8 @@ function makearelease
 #shellcheck disable=SC2038
 find . -name rat.txt | xargs -I% cat % > "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-rat.txt"

-# Stage CHANGES and RELEASENOTES files
-for i in CHANGES RELEASENOTES; do
+# Stage CHANGELOG and RELEASENOTES files
+for i in CHANGELOG RELEASENOTES; do
   run cp -p \
     "${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION}"/${i}*.md \
     "${ARTIFACTS_DIR}/${i}.md"
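For context, the staging loop above just copies the per-release CHANGELOG and RELEASENOTES markdown into the artifacts directory under the new names. A standalone sketch of what it does, with made-up values for the variables the script normally derives itself and plain cp in place of the script's run wrapper:

    # Example values only; create-release computes these elsewhere.
    BASEDIR=/path/to/hadoop-source
    HADOOP_VERSION=3.2.0
    ARTIFACTS_DIR=/tmp/hadoop-release-artifacts

    for i in CHANGELOG RELEASENOTES; do
      cp -p \
        "${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION}"/${i}*.md \
        "${ARTIFACTS_DIR}/${i}.md"
    done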
@@ -73,7 +73,7 @@ WANTED="$1"
 shift
 ARGV=("$@")

-HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.4.0}
+HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.8.0}
 BIN=$(yetus_abs "${BASH_SOURCE-$0}")
 BINDIR=$(dirname "${BIN}")

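The `${HADOOP_YETUS_VERSION:-0.8.0}` expansion only supplies 0.8.0 as a default: a value already exported by the caller wins. A minimal sketch of that pattern (the script name in the comment is hypothetical):

    #!/usr/bin/env bash
    # Fall back to 0.8.0 unless the caller already set HADOOP_YETUS_VERSION.
    HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.8.0}
    echo "Using Apache Yetus ${HADOOP_YETUS_VERSION}"
    # Override without editing the script:
    #   HADOOP_YETUS_VERSION=0.7.0 ./yetus-version-demo.sh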
@@ -22,6 +22,8 @@ FROM ubuntu:xenial

 WORKDIR /root

+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
+
 #####
 # Disable suggests/recommends
 #####
@@ -38,7 +40,9 @@ ENV DEBCONF_TERSE true
 # WARNING: DO NOT PUT JAVA APPS HERE! Otherwise they will install default
 # Ubuntu Java. See Java section below!
 ######
-RUN apt-get -q update && apt-get -q install -y \
+# hadolint ignore=DL3008
+RUN apt-get -q update \
+    && apt-get -q install -y --no-install-recommends \
     apt-utils \
     build-essential \
     bzip2 \
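This RUN rewrite, repeated for the other package installs below, follows the usual hadolint-friendly apt-get shape: update and install in the same layer, skip recommended packages, then drop the package cache and index lists so they never land in the image (the `ignore=DL3008` directive acknowledges that package versions are intentionally left unpinned). The same sequence with a placeholder package name:

    # "some-package" is a placeholder; the real package lists are in the hunks here.
    apt-get -q update \
        && apt-get -q install -y --no-install-recommends some-package \
        && apt-get clean \
        && rm -rf /var/lib/apt/lists/*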
@@ -74,84 +78,88 @@ RUN apt-get -q update && apt-get -q install -y \
     snappy \
     sudo \
     valgrind \
-    zlib1g-dev
+    zlib1g-dev \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*


 #######
 # OpenJDK 8
 #######
-RUN apt-get -q install -y openjdk-8-jdk
+# hadolint ignore=DL3008
+RUN apt-get -q update \
+    && apt-get -q install -y --no-install-recommends openjdk-8-jdk libbcprov-java \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*

-#######
-# OpenJDK 9
-# w/workaround for
-# https://bugs.launchpad.net/ubuntu/+source/openjdk-9/+bug/1593191
-#######
-RUN apt-get -o Dpkg::Options::="--force-overwrite" \
-    -q install -y \
-    openjdk-9-jdk-headless
-
-#######
-# Set default Java
-#######
-#
-# By default, OpenJDK sets the default Java to the highest version.
-# We want the opposite, soooooo....
-#
-RUN update-java-alternatives --set java-1.8.0-openjdk-amd64
-RUN update-alternatives --get-selections | grep -i jdk | \
-    while read line; do \
-    alternative=$(echo $line | awk '{print $1}'); \
-    path=$(echo $line | awk '{print $3}'); \
-    newpath=$(echo $path | sed -e 's/java-9/java-8/'); \
-    update-alternatives --set $alternative $newpath; \
-    done

 ######
 # Install cmake 3.1.0 (3.5.1 ships with Xenial)
 ######
-RUN mkdir -p /opt/cmake && \
-    curl -L -s -S \
+RUN mkdir -p /opt/cmake \
+    && curl -L -s -S \
     https://cmake.org/files/v3.1/cmake-3.1.0-Linux-x86_64.tar.gz \
-    -o /opt/cmake.tar.gz && \
-    tar xzf /opt/cmake.tar.gz --strip-components 1 -C /opt/cmake
+    -o /opt/cmake.tar.gz \
+    && tar xzf /opt/cmake.tar.gz --strip-components 1 -C /opt/cmake
 ENV CMAKE_HOME /opt/cmake
 ENV PATH "${PATH}:/opt/cmake/bin"

 ######
 # Install Google Protobuf 2.5.0 (2.6.0 ships with Xenial)
 ######
-RUN mkdir -p /opt/protobuf-src && \
-    curl -L -s -S \
+# hadolint ignore=DL3003
+RUN mkdir -p /opt/protobuf-src \
+    && curl -L -s -S \
     https://github.com/google/protobuf/releases/download/v2.5.0/protobuf-2.5.0.tar.gz \
-    -o /opt/protobuf.tar.gz && \
-    tar xzf /opt/protobuf.tar.gz --strip-components 1 -C /opt/protobuf-src
-RUN cd /opt/protobuf-src && ./configure --prefix=/opt/protobuf && make install
+    -o /opt/protobuf.tar.gz \
+    && tar xzf /opt/protobuf.tar.gz --strip-components 1 -C /opt/protobuf-src \
+    && cd /opt/protobuf-src \
+    && ./configure --prefix=/opt/protobuf \
+    && make install \
+    && cd /root \
+    && rm -rf /opt/protobuf-src
 ENV PROTOBUF_HOME /opt/protobuf
 ENV PATH "${PATH}:/opt/protobuf/bin"

 ######
 # Install Apache Maven 3.3.9 (3.3.9 ships with Xenial)
 ######
-RUN apt-get -q update && apt-get -q install -y maven
+# hadolint ignore=DL3008
+RUN apt-get -q update \
+    && apt-get -q install -y --no-install-recommends maven \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
 ENV MAVEN_HOME /usr

 ######
 # Install findbugs 3.0.1 (3.0.1 ships with Xenial)
 # Ant is needed for findbugs
 ######
-RUN apt-get -q update && apt-get -q install -y findbugs ant
+# hadolint ignore=DL3008
+RUN apt-get -q update \
+    && apt-get -q install -y --no-install-recommends findbugs ant \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
 ENV FINDBUGS_HOME /usr

 ####
 # Install shellcheck (0.4.6, the latest as of 2017-09-26)
 ####
-RUN add-apt-repository -y ppa:jonathonf/ghc-8.0.2
-RUN apt-get -q update && apt-get -q install -y shellcheck
+# hadolint ignore=DL3008
+RUN add-apt-repository -y ppa:jonathonf/ghc-8.0.2 \
+    && apt-get -q update \
+    && apt-get -q install -y --no-install-recommends shellcheck \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*

 ####
 # Install bats (0.4.0, the latest as of 2017-09-26, ships with Xenial)
 ####
-RUN apt-get -q update && apt-get -q install -y bats
+# hadolint ignore=DL3008
+RUN apt-get -q update \
+    && apt-get -q install -y --no-install-recommends bats \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*

 ####
 # Install pylint at fixed version (2.0.0 removed python2 support)
@@ -162,17 +170,29 @@ RUN pip2 install pylint==1.9.2
 ####
 # Install dateutil.parser
 ####
-RUN pip2 install python-dateutil
+RUN pip2 install python-dateutil==2.7.3

 ###
 # Install node.js for web UI framework (4.2.6 ships with Xenial)
 ###
-RUN apt-get -y install nodejs && \
-    ln -s /usr/bin/nodejs /usr/bin/node && \
-    apt-get -y install npm && \
-    npm install npm@latest -g && \
-    npm install -g bower && \
-    npm install -g ember-cli
+# hadolint ignore=DL3008, DL3016
+RUN apt-get -q update \
+    && apt-get install -y --no-install-recommends nodejs npm \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/* \
+    && ln -s /usr/bin/nodejs /usr/bin/node \
+    && npm install npm@latest -g \
+    && npm install -g jshint

+###
+# Install hadolint
+####
+RUN curl -L -s -S \
+    https://github.com/hadolint/hadolint/releases/download/v1.11.1/hadolint-Linux-x86_64 \
+    -o /bin/hadolint \
+    && chmod a+rx /bin/hadolint \
+    && shasum -a 512 /bin/hadolint | \
+    awk '$1!="734e37c1f6619cbbd86b9b249e69c9af8ee1ea87a2b1ff71dccda412e9dac35e63425225a95d71572091a3f0a11e9a04c2fc25d9e91b840530c26af32b9891ca" {exit(1)}'
+
 ###
 # Avoid out of memory errors in builds
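The hadolint binary added above is pinned by a SHA-512 digest: shasum prints the digest and the trailing awk guard, being the last command in the pipeline, exits non-zero on a mismatch and so fails the RUN step (the pipefail option added via the SHELL instruction earlier in this diff additionally catches a failing shasum). A standalone sketch of that guard with a placeholder digest:

    set -o pipefail
    expected="<expected-sha512-digest>"   # placeholder; the real digest is in the hunk above
    if shasum -a 512 /bin/hadolint | awk -v want="$expected" '$1 != want {exit(1)}'; then
        echo "hadolint digest verified"
    else
        echo "hadolint digest mismatch" >&2
        exit 1
    fi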
@@ -189,21 +209,27 @@ ENV MAVEN_OPTS -Xms256m -Xmx1536m
 ####
 # Install svn & Forrest (for Apache Hadoop website)
 ###
-RUN apt-get -q update && apt-get -q install -y subversion
+# hadolint ignore=DL3008
+RUN apt-get -q update \
+    && apt-get -q install -y --no-install-recommends subversion \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*

-RUN mkdir -p /opt/apache-forrest && \
-    curl -L -s -S \
+RUN mkdir -p /opt/apache-forrest \
+    && curl -L -s -S \
     https://archive.apache.org/dist/forrest/0.8/apache-forrest-0.8.tar.gz \
-    -o /opt/forrest.tar.gz && \
-    tar xzf /opt/forrest.tar.gz --strip-components 1 -C /opt/apache-forrest
+    -o /opt/forrest.tar.gz \
+    && tar xzf /opt/forrest.tar.gz --strip-components 1 -C /opt/apache-forrest
 RUN echo 'forrest.home=/opt/apache-forrest' > build.properties
 ENV FORREST_HOME=/opt/apache-forrest

 # Hugo static website generator (for new hadoop site and Ozone docs)
-RUN curl -L -o hugo.deb https://github.com/gohugoio/hugo/releases/download/v0.30.2/hugo_0.30.2_Linux-64bit.deb && dpkg --install hugo.deb && rm hugo.deb
+RUN curl -L -o hugo.deb https://github.com/gohugoio/hugo/releases/download/v0.30.2/hugo_0.30.2_Linux-64bit.deb \
+    && dpkg --install hugo.deb \
+    && rm hugo.deb

 # Add a welcome message and environment checks.
-ADD hadoop_env_checks.sh /root/hadoop_env_checks.sh
+COPY hadoop_env_checks.sh /root/hadoop_env_checks.sh
 RUN chmod 755 /root/hadoop_env_checks.sh
-RUN echo '~/hadoop_env_checks.sh' >> /root/.bashrc
+# hadolint ignore=SC2016
+RUN echo '${HOME}/hadoop_env_checks.sh' >> /root/.bashrc
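Two details in the last hunk are easy to miss: COPY replaces ADD for a plain local file, and the single quotes around `${HOME}/hadoop_env_checks.sh` are deliberate (hence the SC2016 ignore), so the variable is written literally into `.bashrc` and only expanded when an interactive shell starts. A quick illustration:

    # At image build time the literal text is appended:
    echo '${HOME}/hadoop_env_checks.sh' >> /root/.bashrc
    tail -n 1 /root/.bashrc    # prints: ${HOME}/hadoop_env_checks.sh
    # When a login shell later sources .bashrc, ${HOME} expands (to /root here)
    # and the check script runs.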
@@ -1079,6 +1079,8 @@
 <argument>--projecttitle</argument>
 <argument>Apache Hadoop</argument>
 <argument>--usetoday</argument>
+<argument>--fileversions</argument>
+<argument>--dirversions</argument>
 <argument>--version</argument>
 <argument>${project.version}</argument>
 </arguments>
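These `<argument>` elements are handed to Yetus releasedocmaker when the release documentation is generated; the two new flags control whether the generated files and directories carry the version in their names. Roughly the equivalent command line, assuming the executable configured elsewhere in this pom is releasedocmaker and that Maven has already resolved `${project.version}` (3.2.0 is just an example value):

    releasedocmaker --projecttitle "Apache Hadoop" \
        --usetoday \
        --fileversions \
        --dirversions \
        --version 3.2.0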
@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 0.15.4 - Unreleased (as of 2017-08-28)
+## Release 0.15.4 - Unreleased (as of 2018-09-01)


@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 0.17.3 - Unreleased (as of 2017-08-28)
+## Release 0.17.3 - Unreleased (as of 2018-09-01)


@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 0.18.4 - Unreleased (as of 2017-08-28)
+## Release 0.18.4 - Unreleased (as of 2018-09-01)


@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 0.20.203.1 - Unreleased (as of 2017-08-28)
+## Release 0.20.203.1 - Unreleased (as of 2018-09-01)


@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 0.20.3 - Unreleased (as of 2017-08-28)
+## Release 0.20.3 - Unreleased (as of 2018-09-01)

 ### INCOMPATIBLE CHANGES:

@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 0.21.1 - Unreleased (as of 2017-08-28)
+## Release 0.21.1 - Unreleased (as of 2018-09-01)

 ### INCOMPATIBLE CHANGES:

@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 0.22.1 - Unreleased (as of 2017-08-28)
+## Release 0.22.1 - Unreleased (as of 2018-09-01)

 ### INCOMPATIBLE CHANGES:

@@ -97,6 +97,7 @@
 | [HDFS-5557](https://issues.apache.org/jira/browse/HDFS-5557) | Write pipeline recovery for the last packet in the block may cause rejection of valid replicas | Critical | . | Kihwal Lee | Kihwal Lee |
 | [HDFS-5526](https://issues.apache.org/jira/browse/HDFS-5526) | Datanode cannot roll back to previous layout version | Blocker | datanode | Tsz Wo Nicholas Sze | Kihwal Lee |
 | [HDFS-5558](https://issues.apache.org/jira/browse/HDFS-5558) | LeaseManager monitor thread can crash if the last block is complete but another block is not. | Major | . | Kihwal Lee | Kihwal Lee |
+| [HDFS-5357](https://issues.apache.org/jira/browse/HDFS-5357) | TestFileSystemAccessService failures in JDK7 | Major | . | Robert Parker | Robert Parker |


 ### TESTS:
@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 0.23.2 - Unreleased (as of 2017-08-28)
+## Release 0.23.2 - Unreleased (as of 2018-09-01)

 ### INCOMPATIBLE CHANGES:

@@ -35,7 +35,7 @@
 | [MAPREDUCE-4845](https://issues.apache.org/jira/browse/MAPREDUCE-4845) | ClusterStatus.getMaxMemory() and getUsedMemory() exist in MR1 but not MR2 | Major | client | Sandy Ryza | Sandy Ryza |
 | [YARN-285](https://issues.apache.org/jira/browse/YARN-285) | RM should be able to provide a tracking link for apps that have already been purged | Major | . | Derek Dagit | Derek Dagit |
 | [MAPREDUCE-4899](https://issues.apache.org/jira/browse/MAPREDUCE-4899) | Provide a plugin to the Yarn Web App Proxy to generate tracking links for M/R appllications given the ID | Major | . | Derek Dagit | Derek Dagit |
-| [MAPREDUCE-4810](https://issues.apache.org/jira/browse/MAPREDUCE-4810) | Add admin command options for ApplicationMaster | Minor | applicationmaster | Jason Lowe | Jerry Chen |
+| [MAPREDUCE-4810](https://issues.apache.org/jira/browse/MAPREDUCE-4810) | Add admin command options for ApplicationMaster | Minor | applicationmaster | Jason Lowe | Haifeng Chen |


 ### BUG FIXES:
@@ -84,7 +84,7 @@
 | [HADOOP-9181](https://issues.apache.org/jira/browse/HADOOP-9181) | Set daemon flag for HttpServer's QueuedThreadPool | Major | . | Liang Xie | Liang Xie |
 | [YARN-320](https://issues.apache.org/jira/browse/YARN-320) | RM should always be able to renew its own tokens | Blocker | resourcemanager | Daryn Sharp | Daryn Sharp |
 | [YARN-325](https://issues.apache.org/jira/browse/YARN-325) | RM CapacityScheduler can deadlock when getQueueInfo() is called and a container is completing | Blocker | capacityscheduler | Jason Lowe | Arun C Murthy |
-| [MAPREDUCE-4848](https://issues.apache.org/jira/browse/MAPREDUCE-4848) | TaskAttemptContext cast error during AM recovery | Major | mr-am | Jason Lowe | Jerry Chen |
+| [MAPREDUCE-4848](https://issues.apache.org/jira/browse/MAPREDUCE-4848) | TaskAttemptContext cast error during AM recovery | Major | mr-am | Jason Lowe | Haifeng Chen |
 | [HADOOP-9097](https://issues.apache.org/jira/browse/HADOOP-9097) | Maven RAT plugin is not checking all source files | Critical | build | Tom White | Thomas Graves |
 | [HDFS-4385](https://issues.apache.org/jira/browse/HDFS-4385) | Maven RAT plugin is not checking all source files | Critical | build | Thomas Graves | Thomas Graves |
 | [MAPREDUCE-4934](https://issues.apache.org/jira/browse/MAPREDUCE-4934) | Maven RAT plugin is not checking all source files | Critical | build | Thomas Graves | Thomas Graves |
@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 0.24.0 - Unreleased (as of 2017-08-28)
+## Release 0.24.0 - Unreleased (as of 2018-09-02)

 ### INCOMPATIBLE CHANGES:

@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 1.1.3 - Unreleased (as of 2017-08-28)
+## Release 1.1.3 - Unreleased (as of 2018-09-02)


@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 1.2.2 - Unreleased (as of 2017-08-28)
+## Release 1.2.2 - Unreleased (as of 2018-09-02)


@@ -18,7 +18,7 @@
 -->
 # Apache Hadoop Changelog

-## Release 1.3.0 - Unreleased (as of 2017-08-28)
+## Release 1.3.0 - Unreleased (as of 2018-09-02)

 ### INCOMPATIBLE CHANGES:
Some files were not shown because too many files have changed in this diff.