# Copyright 2011 The Apache Software Foundation
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Set Hadoop-specific environment variables here.

# The only required environment variable is JAVA_HOME. All others are
# optional. When running a distributed configuration it is best to
# set JAVA_HOME in this file, so that it is correctly defined on
# remote nodes.
# The java implementation to use.
export JAVA_HOME=${JAVA_HOME}
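# A commented-out example of pinning JAVA_HOME explicitly; the path below is
# an assumption for a Linux OpenJDK install, not a value shipped with Hadoop.
#export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64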

# The jsvc implementation to use. Jsvc is required to run secure datanodes.
#export JSVC_HOME=${JSVC_HOME}
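# Hypothetical example: point JSVC_HOME at the directory holding the jsvc
# binary from Apache Commons Daemon (the path below is an assumption).
#export JSVC_HOME=/usr/lib/jsvc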

export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}
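# Note: the ${VAR:-default} expansion above keeps a HADOOP_CONF_DIR already
# set in the environment and falls back to /etc/hadoop otherwise.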

# Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
for f in $HADOOP_HOME/contrib/capacity-scheduler/*.jar; do
  if [ "$HADOOP_CLASSPATH" ]; then
    export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
  else
    export HADOOP_CLASSPATH=$f
  fi
done
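# Site-specific jars can be appended the same way; the jar path below is a
# hypothetical example, not part of the stock distribution.
#export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/opt/myorg/lib/my-codec.jar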

# The maximum amount of heap to use, in MB. Default is 1000.
#export HADOOP_HEAPSIZE=
#export HADOOP_NAMENODE_INIT_HEAPSIZE=""
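# Example (an assumption; size to your hardware): a 2 GB daemon heap.
# HADOOP_HEAPSIZE takes a plain number of megabytes, not an -Xmx string.
#export HADOOP_HEAPSIZE=2000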

# Extra Java runtime options. Empty by default.
export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"
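# Extra JVM flags can be layered on in the same pattern; the GC flag below is
# an illustrative example, not a tuning recommendation.
#export HADOOP_OPTS="$HADOOP_OPTS -XX:+UseConcMarkSweepGC"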
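
# Blanking these Kerberos system properties on OS X avoids the JVM's
# "Unable to load realm info from SCDynamicStore" error.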
MAC_OSX=false
case "`uname`" in
Darwin*) MAC_OSX=true;;
esac

if $MAC_OSX; then
  export HADOOP_OPTS="$HADOOP_OPTS -Djava.security.krb5.realm= -Djava.security.krb5.kdc="
fi

# Command specific options appended to HADOOP_OPTS when specified
export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_NAMENODE_OPTS"
export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"

export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_SECONDARYNAMENODE_OPTS"
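# Per-daemon JVM flags follow the same append pattern; the JDWP debug port
# below is a hypothetical example.
#export HADOOP_NAMENODE_OPTS="$HADOOP_NAMENODE_OPTS -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=8000"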

# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
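# Example of raising the client heap, e.g. for large distcp jobs (the 1 GB
# figure is an assumption, not a tuned value):
#export HADOOP_CLIENT_OPTS="-Xmx1024m $HADOOP_CLIENT_OPTS"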
#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"

# On secure datanodes, user to run the datanode as after dropping privileges
export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
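# Conventionally the HDFS service account when running secure datanodes under
# jsvc; "hdfs" below is an example, not a requirement.
#export HADOOP_SECURE_DN_USER=hdfs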

# Where log files are stored. $HADOOP_HOME/logs by default.
#export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER
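# Example of an absolute log location (the path is an assumption; pick a
# partition with space for logs):
#export HADOOP_LOG_DIR=/var/log/hadoop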

# Where log files are stored in the secure data environment.
export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}

# The directory where pid files are stored. /tmp by default.
# NOTE: this should be set to a directory that can only be written to by
#       the user that will run the hadoop daemons. Otherwise there is the
#       potential for a symlink attack.
export HADOOP_PID_DIR=${HADOOP_PID_DIR}
export HADOOP_SECURE_DN_PID_DIR=${HADOOP_PID_DIR}
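# Example of a daemon-writable, non-world-writable pid directory (the path is
# an assumption):
#export HADOOP_PID_DIR=/var/run/hadoop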

# A string representing this instance of hadoop. $USER by default.
export HADOOP_IDENT_STRING=$USER
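# The ident string appears in log and pid file names, e.g.
# hadoop-$HADOOP_IDENT_STRING-namenode-<hostname>.log, so keep it filename-safe.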