@rem HADOOP-8952. Enhancements to support Hadoop on Windows Server and Windows Azure environments. Contributed by Ivan Mitic, Chuan Liu, Ramya Sunil, Bikas Saha, Kanna Karanam, John Gordon, Brandon Li, Chris Nauroth, David Lao, Sumadhur Reddy Bolli, Arpit Agarwal, Ahmed El Baz, Mike Liddell, Jing Zhao, Thejas Nair, Steve Maine, Ganeshan Iyer, Raja Aluri, Giridharan Kesavan, Ramya Bharathi Nimmagadda.
@rem git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1453486 13f79535-47bb-0310-9956-ffa450edef68
@rem 2013-03-06 19:15:18 +00:00
|
|
|
@echo off
@rem Suppress command echoing for the rest of this script so only intended
@rem output (if any) reaches the console.
|
|
|
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
|
|
|
@rem contributor license agreements. See the NOTICE file distributed with
|
|
|
|
@rem this work for additional information regarding copyright ownership.
|
|
|
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
|
|
|
@rem (the "License"); you may not use this file except in compliance with
|
|
|
|
@rem the License. You may obtain a copy of the License at
|
|
|
|
@rem
|
|
|
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
@rem
|
|
|
|
@rem Unless required by applicable law or agreed to in writing, software
|
|
|
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
@rem See the License for the specific language governing permissions and
|
|
|
|
@rem limitations under the License.
|
|
|
|
|
|
|
|
@rem Set Hadoop-specific environment variables here.
|
|
|
|
|
|
|
|
@rem The only required environment variable is JAVA_HOME. All others are
|
|
|
|
@rem optional. When running a distributed configuration it is best to
|
|
|
|
@rem set JAVA_HOME in this file, so that it is correctly defined on
|
|
|
|
@rem remote nodes.
|
|
|
|
|
|
|
|
@rem The java implementation to use. Required.
|
|
|
|
@rem Self-assignment keeps any JAVA_HOME already present in the environment.
@rem Replace the right-hand side with an explicit JDK path to pin the Java
@rem installation for all Hadoop daemons started from this environment.
set JAVA_HOME=%JAVA_HOME%
|
|
|
|
|
|
|
|
@rem The jsvc implementation to use. Jsvc is required to run secure datanodes.
|
|
|
|
@rem set JSVC_HOME=%JSVC_HOME%
|
|
|
|
|
|
|
|
@rem set HADOOP_CONF_DIR=
|
|
|
|
|
|
|
|
@rem Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
|
|
|
|
@rem Append the capacity-scheduler contrib jars to HADOOP_CLASSPATH when the
@rem contrib directory exists. The path in the existence check is quoted so
@rem the test still works when %HADOOP_HOME% contains spaces (e.g.
@rem "C:\Program Files\hadoop") — unquoted, cmd would split the argument and
@rem the check would always fail or misfire.
@rem NOTE(review): "\*.jar" is passed to the JVM literally (cmd does no glob
@rem expansion); the Java wildcard classpath form is "dir\*" — confirm the
@rem launcher (hadoop.cmd) expands this pattern itself before changing it.
if exist "%HADOOP_HOME%\contrib\capacity-scheduler" (
  if not defined HADOOP_CLASSPATH (
    set HADOOP_CLASSPATH=%HADOOP_HOME%\contrib\capacity-scheduler\*.jar
  ) else (
    @rem %HADOOP_CLASSPATH% expands when the whole if-statement is parsed;
    @rem that is safe here because the two branches are mutually exclusive.
    set HADOOP_CLASSPATH=%HADOOP_CLASSPATH%;%HADOOP_HOME%\contrib\capacity-scheduler\*.jar
  )
)
|
|
|
|
|
|
|
|
@rem The maximum amount of heap to use, in MB. Default is 1000.
|
|
|
|
@rem set HADOOP_HEAPSIZE=
|
|
|
|
@rem set HADOOP_NAMENODE_INIT_HEAPSIZE=""
|
|
|
|
|
|
|
|
@rem Extra Java runtime options. Empty by default.
|
@rem 2013-05-01 16:17:17 +00:00
|
|
|
@rem set HADOOP_OPTS=-Djava.net.preferIPv4Stack=true
|
@rem HADOOP-8952. Enhancements to support Hadoop on Windows Server and Windows Azure environments. Contributed by Ivan Mitic, Chuan Liu, Ramya Sunil, Bikas Saha, Kanna Karanam, John Gordon, Brandon Li, Chris Nauroth, David Lao, Sumadhur Reddy Bolli, Arpit Agarwal, Ahmed El Baz, Mike Liddell, Jing Zhao, Thejas Nair, Steve Maine, Ganeshan Iyer, Raja Aluri, Giridharan Kesavan, Ramya Bharathi Nimmagadda.
@rem git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1453486 13f79535-47bb-0310-9956-ffa450edef68
@rem 2013-03-06 19:15:18 +00:00
|
|
|
|
|
|
|
@rem Command specific options appended to HADOOP_OPTS when specified
|
|
|
|
@rem Default the Hadoop security logger to the INFO-level rolling file
@rem appender unless the caller has already chosen one.
if not defined HADOOP_SECURITY_LOGGER set HADOOP_SECURITY_LOGGER=INFO,RFAS
|
|
|
|
@rem Default the HDFS audit logger to the NullAppender (audit logging
@rem disabled) unless the caller has already chosen one.
if not defined HDFS_AUDIT_LOGGER set HDFS_AUDIT_LOGGER=INFO,NullAppender
|
|
|
|
|
|
|
|
@rem NameNode JVM options: route security and HDFS audit events to the
@rem loggers selected above, appending after any options the caller already
@rem supplied in HADOOP_NAMENODE_OPTS.
set HADOOP_NAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_NAMENODE_OPTS%
|
|
|
|
@rem DataNode JVM options: security logging is pinned to ERROR,RFAS here
@rem (not %HADOOP_SECURITY_LOGGER%), preserving caller-supplied options.
set HADOOP_DATANODE_OPTS=-Dhadoop.security.logger=ERROR,RFAS %HADOOP_DATANODE_OPTS%
|
|
|
|
@rem SecondaryNameNode JVM options: mirrors the NameNode's logger wiring,
@rem preserving caller-supplied options.
set HADOOP_SECONDARYNAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_SECONDARYNAMENODE_OPTS%
|
|
|
|
|
|
|
|
@rem The following applies to multiple commands (fs, dfs, fsck, distcp etc)
|
|
|
|
@rem Cap the client-side JVM heap at 128 MB for command-line tools (fs, dfs,
@rem fsck, distcp, etc.); caller-supplied options follow and can override.
set HADOOP_CLIENT_OPTS=-Xmx128m %HADOOP_CLIENT_OPTS%
|
|
|
|
@rem set HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData %HADOOP_JAVA_PLATFORM_OPTS%"
|
|
|
|
|
|
|
|
@rem On secure datanodes, user to run the datanode as after dropping privileges
|
|
|
|
@rem Self-assignment keeps any externally provided value. Set an account
@rem name here for secure datanodes to run as after dropping privileges.
set HADOOP_SECURE_DN_USER=%HADOOP_SECURE_DN_USER%
|
|
|
|
|
|
|
|
@rem Where log files are stored. %HADOOP_HOME%/logs by default.
|
|
|
|
@rem set HADOOP_LOG_DIR=%HADOOP_LOG_DIR%\%USERNAME%
|
|
|
|
|
|
|
|
@rem Where log files are stored in the secure data environment.
|
|
|
|
@rem Secure-datanode log location: a per-HDFS-user subdirectory of the main
@rem log directory. Empty components here simply collapse if HADOOP_LOG_DIR
@rem or HADOOP_HDFS_USER is unset — presumably both are set by the launcher;
@rem confirm against hadoop.cmd.
set HADOOP_SECURE_DN_LOG_DIR=%HADOOP_LOG_DIR%\%HADOOP_HDFS_USER%
|
|
|
|
|
|
|
|
@rem The directory where pid files are stored. /tmp by default.
|
|
|
|
@rem NOTE: this should be set to a directory that can only be written to by
|
|
|
|
@rem the user that will run the hadoop daemons. Otherwise there is the
|
|
|
|
@rem potential for a symlink attack.
|
|
|
|
@rem Self-assignment keeps any externally provided pid directory. Per the
@rem note above, point this at a directory writable only by the daemon user
@rem to avoid symlink attacks.
set HADOOP_PID_DIR=%HADOOP_PID_DIR%
|
|
|
|
@rem Secure datanodes reuse the same pid directory as the other daemons.
set HADOOP_SECURE_DN_PID_DIR=%HADOOP_PID_DIR%
|
|
|
|
|
|
|
|
@rem A string representing this instance of hadoop. %USERNAME% by default.
|
|
|
|
@rem Identity string for this Hadoop instance, used in log and pid file
@rem names; defaults to the current Windows user name.
set HADOOP_IDENT_STRING=%USERNAME%
|