# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Dockerfile for installing the necessary dependencies for building Hadoop.
# See BUILDING.txt.

FROM ubuntu:bionic

WORKDIR /root

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

#####
# Disable suggests/recommends
#####
RUN echo APT::Install-Recommends "0"\; > /etc/apt/apt.conf.d/10disableextras
RUN echo APT::Install-Suggests "0"\; >> /etc/apt/apt.conf.d/10disableextras

ENV DEBIAN_FRONTEND noninteractive
ENV DEBCONF_TERSE true

######
# Install common dependencies from packages. Versions here are either
# sufficient or irrelevant.
######
# hadolint ignore=DL3008
RUN apt-get -q update \
    && apt-get -q install -y --no-install-recommends \
        ant \
        apt-utils \
        bats \
        build-essential \
        bzip2 \
        clang \
        cmake \
        curl \
        doxygen \
        findbugs \
        fuse \
        g++ \
        gcc \
        git \
        gnupg-agent \
        libbcprov-java \
        libbz2-dev \
        libcurl4-openssl-dev \
        libfuse-dev \
        libprotobuf-dev \
        libprotoc-dev \
        libsasl2-dev \
        libsnappy-dev \
        libssl-dev \
        libtool \
        libzstd1-dev \
        locales \
        make \
        maven \
        openjdk-11-jdk \
        openjdk-8-jdk \
        pinentry-curses \
        pkg-config \
        python \
        python2.7 \
        python-pip \
        python-pkg-resources \
        python-setuptools \
        python-wheel \
        rsync \
        shellcheck \
        software-properties-common \
        sudo \
        valgrind \
        zlib1g-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

######
# Set env vars required to build Hadoop
######
ENV MAVEN_HOME /usr
# JAVA_HOME must be set in Maven >= 3.5.0 (MNG-6003)
ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-arm64
ENV FINDBUGS_HOME /usr

######
# Install Google Protobuf 3.7.1 (3.0.0 ships with Bionic)
######
# hadolint ignore=DL3003
RUN mkdir -p /opt/protobuf-src \
    && curl -L -s -S \
        https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz \
        -o /opt/protobuf.tar.gz \
    && tar xzf /opt/protobuf.tar.gz --strip-components 1 -C /opt/protobuf-src \
    && cd /opt/protobuf-src \
    && ./configure --prefix=/opt/protobuf \
    && make install \
    && cd /root \
    && rm -rf /opt/protobuf-src

ENV PROTOBUF_HOME /opt/protobuf
ENV PATH "${PATH}:/opt/protobuf/bin"

####
# Install pylint at fixed version (2.0.0 removed python2 support)
# https://github.com/PyCQA/pylint/issues/2294
####
RUN pip2 install \
    astroid==1.6.6 \
    isort==4.3.21 \
    configparser==4.0.2 \
    pylint==1.9.2

####
# Install dateutil.parser
####
RUN pip2 install python-dateutil==2.7.3

###
# Install node.js 10.x for web UI framework (8.10 ships with Bionic)
###
# hadolint ignore=DL3008
RUN curl -L -s -S https://deb.nodesource.com/setup_10.x | bash - \
    && apt-get install -y --no-install-recommends nodejs \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    && npm install -g bower@1.8.8

###
# Install Yarn 1.22.5 for web UI framework
###
RUN curl -s -S https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
    && echo 'deb https://dl.yarnpkg.com/debian/ stable main' > /etc/apt/sources.list.d/yarn.list \
    && apt-get -q update \
    && apt-get install -y --no-install-recommends yarn=1.22.5-1 \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

###
# Install phantomjs built for aarch64
###
RUN mkdir -p /opt/phantomjs \
    && curl -L -s -S \
        https://github.com/liusheng/phantomjs/releases/download/2.1.1/phantomjs-2.1.1-linux-aarch64.tar.bz2 \
        -o /opt/phantomjs/phantomjs-2.1.1-linux-aarch64.tar.bz2 \
    && tar xvjf /opt/phantomjs/phantomjs-2.1.1-linux-aarch64.tar.bz2 --strip-components 1 -C /opt/phantomjs \
    && cp /opt/phantomjs/bin/phantomjs /usr/bin/ \
    && rm -rf /opt/phantomjs

###
# Avoid out of memory errors in builds
###
ENV MAVEN_OPTS -Xms256m -Xmx1536m

# Skip gpg verification when downloading Yetus via yetus-wrapper
ENV HADOOP_SKIP_YETUS_VERIFICATION true

###
# Everything past this point is either not needed for testing or breaks Yetus.
# So tell Yetus not to read the rest of the file:
# YETUS CUT HERE
###

# Hugo static website generator (for new hadoop site docs)
RUN curl -L -o hugo.deb https://github.com/gohugoio/hugo/releases/download/v0.58.3/hugo_0.58.3_Linux-ARM64.deb \
    && dpkg --install hugo.deb \
    && rm hugo.deb

# Add a welcome message and environment checks.
COPY hadoop_env_checks.sh /root/hadoop_env_checks.sh
RUN chmod 755 /root/hadoop_env_checks.sh

# hadolint ignore=SC2016
RUN echo '${HOME}/hadoop_env_checks.sh' >> /root/.bashrc
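
###
# Example usage (illustrative sketch, not part of the image build).
# Assuming this file lives at dev-support/docker/Dockerfile_aarch64 in a
# Hadoop source checkout, the image could be built and entered manually
# roughly as follows; the image tag below is hypothetical, and the supported
# workflow is the one described in BUILDING.txt (e.g. via start-build-env.sh):
#
#   docker build -t hadoop-build-aarch64 \
#       -f dev-support/docker/Dockerfile_aarch64 dev-support/docker
#   docker run --rm -it -v "$(pwd)":/root/hadoop -w /root/hadoop \
#       hadoop-build-aarch64 bash
#
# Mounting the checkout under /root keeps it below the WORKDIR set above, so
# the welcome message and environment checks run when the shell starts.
###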