# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Dockerfile for installing the necessary dependencies for building Hadoop.
# See BUILDING.txt.

FROM ubuntu:bionic

WORKDIR /root

# pipefail so that a failing curl in a `curl | tar`-style pipe fails the RUN.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

#####
# Disable suggests/recommends
#####
RUN echo APT::Install-Recommends "0"\; > /etc/apt/apt.conf.d/10disableextras
RUN echo APT::Install-Suggests "0"\; >> /etc/apt/apt.conf.d/10disableextras

# Deliberately baked into the runtime env (not just build-time) so that
# derived images and interactive apt/debconf use stay non-interactive too.
ENV DEBIAN_FRONTEND=noninteractive
ENV DEBCONF_TERSE=true

######
# Install common dependencies from packages. Versions here are either
# sufficient or irrelevant.
######
# hadolint ignore=DL3008
RUN apt-get -q update \
    && apt-get -q install -y --no-install-recommends \
        ant \
        apt-utils \
        bats \
        build-essential \
        bzip2 \
        clang \
        cmake \
        curl \
        doxygen \
        fuse \
        g++ \
        gcc \
        git \
        gnupg-agent \
        libbcprov-java \
        libbz2-dev \
        libcurl4-openssl-dev \
        libfuse-dev \
        libprotobuf-dev \
        libprotoc-dev \
        libsasl2-dev \
        libsnappy-dev \
        libssl-dev \
        libtool \
        libzstd1-dev \
        locales \
        make \
        maven \
        openjdk-11-jdk \
        openjdk-8-jdk \
        phantomjs \
        pinentry-curses \
        pkg-config \
        python2.7 \
        python3 \
        python3-pip \
        python3-pkg-resources \
        python3-setuptools \
        python3-wheel \
        rsync \
        shellcheck \
        software-properties-common \
        sudo \
        valgrind \
        zlib1g-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# UTF-8 locale so Maven/Java tooling handles non-ASCII source and test data.
RUN locale-gen en_US.UTF-8
ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8'
ENV PYTHONIOENCODING=utf-8

######
# Set env vars required to build Hadoop
######
ENV MAVEN_HOME=/usr
# JAVA_HOME must be set in Maven >= 3.5.0 (MNG-6003)
# NOTE(review): arm64 JVM path — this appears to be the aarch64 build image;
# confirm before reusing on x86_64 (there the path ends in -amd64).
ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-arm64

#######
# Install SpotBugs 4.2.2
#######
# -f makes curl fail on HTTP errors instead of saving an error page as the
# tarball; the tgz is removed in the same layer so it is not baked in.
RUN mkdir -p /opt/spotbugs \
    && curl -f -L -s -S https://github.com/spotbugs/spotbugs/releases/download/4.2.2/spotbugs-4.2.2.tgz \
        -o /opt/spotbugs.tgz \
    && tar xzf /opt/spotbugs.tgz --strip-components 1 -C /opt/spotbugs \
    && chmod +x /opt/spotbugs/bin/* \
    && rm /opt/spotbugs.tgz
ENV SPOTBUGS_HOME=/opt/spotbugs

######
# Install Google Protobuf 3.7.1 (3.0.0 ships with Bionic)
######
# hadolint ignore=DL3003
# Built from source to get 3.7.1; both the source tree and the tarball are
# removed in the same layer so only /opt/protobuf remains in the image.
RUN mkdir -p /opt/protobuf-src \
    && curl -f -L -s -S \
        https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz \
        -o /opt/protobuf.tar.gz \
    && tar xzf /opt/protobuf.tar.gz --strip-components 1 -C /opt/protobuf-src \
    && cd /opt/protobuf-src \
    && ./configure --prefix=/opt/protobuf \
    && make "-j$(nproc)" \
    && make install \
    && cd /root \
    && rm -rf /opt/protobuf-src /opt/protobuf.tar.gz
ENV PROTOBUF_HOME=/opt/protobuf
ENV PATH="${PATH}:/opt/protobuf/bin"

####
# Install pylint and python-dateutil
####
# --no-cache-dir keeps pip's download cache out of the image layer (DL3042).
RUN pip3 install --no-cache-dir pylint==2.6.0 python-dateutil==2.8.1

###
# Avoid out of memory errors in builds
###
ENV MAVEN_OPTS="-Xms256m -Xmx1536m"

# Skip gpg verification when downloading Yetus via yetus-wrapper
ENV HADOOP_SKIP_YETUS_VERIFICATION=true

# Force PhantomJS to be in 'headless' mode, do not connect to Xwindow
ENV QT_QPA_PLATFORM=offscreen

###
# Everything past this point is either not needed for testing or breaks Yetus.
# So tell Yetus not to read the rest of the file:
# YETUS CUT HERE
###

# Hugo static website generator (for new hadoop site docs)
# NOTE(review): this download is not checksum-verified; consider pinning a
# sha256 (e.g. via ADD --checksum or a recorded digest) for reproducibility.
RUN curl -f -L -o hugo.deb https://github.com/gohugoio/hugo/releases/download/v0.58.3/hugo_0.58.3_Linux-ARM64.deb \
    && dpkg --install hugo.deb \
    && rm hugo.deb

# Add a welcome message and environment checks.
COPY hadoop_env_checks.sh /root/hadoop_env_checks.sh
RUN chmod 755 /root/hadoop_env_checks.sh

# hadolint ignore=SC2016
# Single quotes are intentional: ${HOME} must expand at shell login, not here.
RUN echo '${HOME}/hadoop_env_checks.sh' >> /root/.bashrc