hadoop-env.cmd (attached to R3704 elastic-yarn)
@echo off
@rem Licensed to the Apache Software Foundation (ASF) under one or more
@rem contributor license agreements.  See the NOTICE file distributed with
@rem this work for additional information regarding copyright ownership.
@rem The ASF licenses this file to You under the Apache License, Version 2.0
@rem (the "License"); you may not use this file except in compliance with
@rem the License.  You may obtain a copy of the License at
@rem
@rem     http://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.

@rem Set Hadoop-specific environment variables here.

@rem The only required environment variable is JAVA_HOME.  All others are
@rem optional.  When running a distributed configuration it is best to
@rem set JAVA_HOME in this file, so that it is correctly defined on
@rem remote nodes.

@rem The java implementation to use.  Required.
set JAVA_HOME=%JAVA_HOME%
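
@rem Example (not part of the stock file): JAVA_HOME is commonly pinned to a
@rem concrete JDK install here rather than inherited from the environment.
@rem The path below is a hypothetical illustration; prefer a path without
@rem spaces (e.g. not "Program Files"), since unquoted spaces can break the
@rem scripts that consume this variable.
@rem set JAVA_HOME=C:\Java\jdk1.8.0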

@rem The jsvc implementation to use. Jsvc is required to run secure datanodes.
@rem set JSVC_HOME=%JSVC_HOME%

@rem set HADOOP_CONF_DIR=

@rem Extra Java CLASSPATH elements.  Automatically insert capacity-scheduler.
if exist %HADOOP_HOME%\contrib\capacity-scheduler (
  if not defined HADOOP_CLASSPATH (
    set HADOOP_CLASSPATH=%HADOOP_HOME%\contrib\capacity-scheduler\*.jar
  ) else (
    set HADOOP_CLASSPATH=%HADOOP_CLASSPATH%;%HADOOP_HOME%\contrib\capacity-scheduler\*.jar
  )
)
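
@rem Example (not part of the stock file): site-specific jars can be appended
@rem to HADOOP_CLASSPATH in the same way as above; the directory below is
@rem purely hypothetical.
@rem set HADOOP_CLASSPATH=%HADOOP_CLASSPATH%;C:\hadoop\extra-libs\*.jar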

@rem The maximum amount of heap to use, in MB. Default is 1000.
@rem set HADOOP_HEAPSIZE=
@rem set HADOOP_NAMENODE_INIT_HEAPSIZE=""

@rem Extra Java runtime options.  Empty by default.
@rem set HADOOP_OPTS=%HADOOP_OPTS% -Djava.net.preferIPv4Stack=true
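
@rem Example (not part of the stock file): additional JVM flags such as
@rem verbose GC logging can be appended here; they apply to every Hadoop JVM
@rem started through these scripts.
@rem set HADOOP_OPTS=%HADOOP_OPTS% -Djava.net.preferIPv4Stack=true -verbose:gc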

@rem Command specific options appended to HADOOP_OPTS when specified
if not defined HADOOP_SECURITY_LOGGER (
  set HADOOP_SECURITY_LOGGER=INFO,RFAS
)
if not defined HDFS_AUDIT_LOGGER (
  set HDFS_AUDIT_LOGGER=INFO,NullAppender
)

set HADOOP_NAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_NAMENODE_OPTS%
set HADOOP_DATANODE_OPTS=-Dhadoop.security.logger=ERROR,RFAS %HADOOP_DATANODE_OPTS%
set HADOOP_SECONDARYNAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_SECONDARYNAMENODE_OPTS%

@rem The following applies to multiple commands (fs, dfs, fsck, distcp etc)
set HADOOP_CLIENT_OPTS=-Xmx512m %HADOOP_CLIENT_OPTS%
@rem set HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData %HADOOP_JAVA_PLATFORM_OPTS%"

@rem On secure datanodes, user to run the datanode as after dropping privileges
set HADOOP_SECURE_DN_USER=%HADOOP_SECURE_DN_USER%

@rem Where log files are stored.  %HADOOP_HOME%/logs by default.
@rem set HADOOP_LOG_DIR=%HADOOP_LOG_DIR%\%USERNAME%
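
@rem Example (not part of the stock file): logs are often redirected to a
@rem dedicated drive; the path below is purely illustrative.
@rem set HADOOP_LOG_DIR=D:\hadoop\logs\%USERNAME%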

@rem Where log files are stored in the secure data environment.
set HADOOP_SECURE_DN_LOG_DIR=%HADOOP_LOG_DIR%\%HADOOP_HDFS_USER%

@rem The directory where pid files are stored. /tmp by default.
@rem NOTE: this should be set to a directory that can only be written to by
@rem       the user that will run the hadoop daemons.  Otherwise there is the
@rem       potential for a symlink attack.
set HADOOP_PID_DIR=%HADOOP_PID_DIR%
set HADOOP_SECURE_DN_PID_DIR=%HADOOP_PID_DIR%
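
@rem Example (not part of the stock file): per the note above, pid files
@rem belong in a directory writable only by the service account that runs the
@rem daemons; the path below is hypothetical.
@rem set HADOOP_PID_DIR=C:\hadoop\pids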

@rem A string representing this instance of hadoop. %USERNAME% by default.
set HADOOP_IDENT_STRING=%USERNAME%