F102958231: hdfs (attached to R3704 elastic-yarn)
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Environment Variables
#
# JSVC_HOME home directory of jsvc binary. Required for starting secure
# datanode.
#
# JSVC_OUTFILE path to jsvc output file. Defaults to
# $HADOOP_LOG_DIR/jsvc.out.
#
# JSVC_ERRFILE path to jsvc error file. Defaults to $HADOOP_LOG_DIR/jsvc.err.
bin=`which $0`
bin=`dirname ${bin}`
bin=`cd "$bin"; pwd`

DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
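
# Illustrative walk-through of the resolution above, assuming a hypothetical
# install under /opt/hadoop (the path is an assumption, not from this repo):
# running /opt/hadoop/bin/hdfs with HADOOP_LIBEXEC_DIR unset resolves bin to
# /opt/hadoop/bin, falls back to /opt/hadoop/libexec, and sources
# /opt/hadoop/libexec/hdfs-config.sh to pull in HDFS environment defaults.
#
#   HADOOP_LIBEXEC_DIR=/opt/hadoop/libexec /opt/hadoop/bin/hdfs dfs -ls /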
function print_usage(){
  echo "Usage: hdfs [--config confdir] COMMAND"
  echo "       where COMMAND is one of:"
  echo "  dfs                  run a filesystem command on the file systems supported in Hadoop."
  echo "  namenode -format     format the DFS filesystem"
  echo "  secondarynamenode    run the DFS secondary namenode"
  echo "  namenode             run the DFS namenode"
  echo "  journalnode          run the DFS journalnode"
  echo "  zkfc                 run the ZK Failover Controller daemon"
  echo "  datanode             run a DFS datanode"
  echo "  dfsadmin             run a DFS admin client"
  echo "  haadmin              run a DFS HA admin client"
  echo "  fsck                 run a DFS filesystem checking utility"
  echo "  balancer             run a cluster balancing utility"
  echo "  jmxget               get JMX exported values from NameNode or DataNode."
  echo "  oiv                  apply the offline fsimage viewer to an fsimage"
  echo "  oev                  apply the offline edits viewer to an edits file"
  echo "  fetchdt              fetch a delegation token from the NameNode"
  echo "  getconf              get config values from configuration"
  echo "  groups               get the groups which users belong to"
  echo "  snapshotDiff         diff two snapshots of a directory or diff the"
  echo "                       current directory contents with a snapshot"
  echo "  lsSnapshottableDir   list all snapshottable dirs owned by the current user"
  echo "                       Use -help to see options"
  echo "  portmap              run a portmap service"
  echo "  nfs3                 run an NFS version 3 gateway"
  echo "  cacheadmin           configure the HDFS cache"
  echo ""
  echo "Most commands print help when invoked w/o parameters."
}
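
# Illustrative invocations of a few of the commands listed above (cluster
# state and paths are hypothetical):
#
#   hdfs namenode -format          # format a new DFS filesystem
#   hdfs dfs -ls /                 # run an FsShell command against HDFS
#   hdfs fsck / -files -blocks     # check filesystem health
#   hdfs getconf -namenodes        # print the namenodes from the loaded config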
if [ $# = 0 ]; then
  print_usage
  exit
fi

COMMAND=$1
shift

case $COMMAND in
  # usage flags
  --help|-help|-h)
    print_usage
    exit
    ;;
esac
# Determine if we're starting a secure datanode, and if so, redefine appropriate variables
if [ "$COMMAND" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
  if [ -n "$JSVC_HOME" ]; then
    if [ -n "$HADOOP_SECURE_DN_PID_DIR" ]; then
      HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
    fi

    if [ -n "$HADOOP_SECURE_DN_LOG_DIR" ]; then
      HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
      HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
    fi

    HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER
    HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
    starting_secure_dn="true"
  else
    echo "It looks like you're trying to start a secure DN, but \$JSVC_HOME" \
      "isn't set. Falling back to starting insecure DN."
  fi
fi
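
# A sketch of an environment under which the branch above selects the secure
# path (the user name and jsvc path are illustrative assumptions):
#
#   sudo HADOOP_SECURE_DN_USER=hdfs \
#        JSVC_HOME=/usr/lib/jsvc \
#        bin/hdfs datanode
#
# All three conditions must hold at once: the command is "datanode", the
# effective UID is 0, and HADOOP_SECURE_DN_USER is non-empty. jsvc can then
# bind privileged ports as root before dropping privileges to that user.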
if [ "$COMMAND" = "namenode" ] ; then
  CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
elif [ "$COMMAND" = "zkfc" ] ; then
  CLASS='org.apache.hadoop.hdfs.tools.DFSZKFailoverController'
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_ZKFC_OPTS"
elif [ "$COMMAND" = "secondarynamenode" ] ; then
  CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
elif [ "$COMMAND" = "datanode" ] ; then
  CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
  if [ "$starting_secure_dn" = "true" ]; then
    HADOOP_OPTS="$HADOOP_OPTS -jvm server $HADOOP_DATANODE_OPTS"
  else
    HADOOP_OPTS="$HADOOP_OPTS -server $HADOOP_DATANODE_OPTS"
  fi
elif [ "$COMMAND" = "journalnode" ] ; then
  CLASS='org.apache.hadoop.hdfs.qjournal.server.JournalNode'
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOURNALNODE_OPTS"
elif [ "$COMMAND" = "dfs" ] ; then
  CLASS=org.apache.hadoop.fs.FsShell
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "dfsadmin" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "haadmin" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "fsck" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.DFSck
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "balancer" ] ; then
  CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
elif [ "$COMMAND" = "jmxget" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.JMXGet
elif [ "$COMMAND" = "oiv" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
elif [ "$COMMAND" = "oev" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer
elif [ "$COMMAND" = "fetchdt" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
elif [ "$COMMAND" = "getconf" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.GetConf
elif [ "$COMMAND" = "groups" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.GetGroups
elif [ "$COMMAND" = "snapshotDiff" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff
elif [ "$COMMAND" = "lsSnapshottableDir" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir
elif [ "$COMMAND" = "portmap" ] ; then
  CLASS=org.apache.hadoop.portmap.Portmap
elif [ "$COMMAND" = "nfs3" ] ; then
  CLASS=org.apache.hadoop.hdfs.nfs.nfs3.Nfs3
elif [ "$COMMAND" = "cacheadmin" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.CacheAdmin
else
  CLASS="$COMMAND"
fi
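
# The else-branch above means any unrecognized COMMAND is treated as a fully
# qualified Java class name to run with the assembled classpath and options.
# An illustrative (hypothetical) use:
#
#   hdfs org.apache.hadoop.util.VersionInfo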
export CLASSPATH=$CLASSPATH

HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
# Check to see if we should start a secure datanode
if [ "$starting_secure_dn" = "true" ]; then
  if [ "$HADOOP_PID_DIR" = "" ]; then
    HADOOP_SECURE_DN_PID="/tmp/hadoop_secure_dn.pid"
  else
    HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
  fi

  JSVC=$JSVC_HOME/jsvc
  if [ ! -f $JSVC ]; then
    echo "JSVC_HOME is not set correctly so jsvc cannot be found. Jsvc is required to run secure datanodes. "
    echo "Please download and install jsvc from http://archive.apache.org/dist/commons/daemon/binaries/ " \
      "and set JSVC_HOME to the directory containing the jsvc binary."
    exit
  fi

  if [[ ! $JSVC_OUTFILE ]]; then
    JSVC_OUTFILE="$HADOOP_LOG_DIR/jsvc.out"
  fi

  if [[ ! $JSVC_ERRFILE ]]; then
    JSVC_ERRFILE="$HADOOP_LOG_DIR/jsvc.err"
  fi

  exec "$JSVC" \
    -Dproc_$COMMAND -outfile "$JSVC_OUTFILE" \
    -errfile "$JSVC_ERRFILE" \
    -pidfile "$HADOOP_SECURE_DN_PID" \
    -nodetach \
    -user "$HADOOP_SECURE_DN_USER" \
    -cp "$CLASSPATH" \
    $JAVA_HEAP_MAX $HADOOP_OPTS \
    org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
else
  # run it
  exec "$JAVA" -Dproc_$COMMAND $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
fi
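
# For reference, an illustrative expansion of the non-secure exec above for
# "hdfs dfs -ls /" (the java path and heap flag are hypothetical values that
# hdfs-config.sh would normally supply via $JAVA and $JAVA_HEAP_MAX):
#
#   exec /usr/lib/jvm/java/bin/java -Dproc_dfs -Xmx1000m \
#     $HADOOP_OPTS org.apache.hadoop.fs.FsShell -ls /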