#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
HADOOP_SHELL_EXECNAME="${MYNAME##*/}"

## @description  build up the hdfs command's usage text.
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_usage
{
  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
  hadoop_add_option "--daemon (start|status|stop)" "operate on a daemon"
  hadoop_add_option "--hostnames list[,of,host,names]" "hosts to use in worker mode"
  hadoop_add_option "--loglevel level" "set the log4j level for this command"
  hadoop_add_option "--hosts filename" "list of hosts to use in worker mode"
  hadoop_add_option "--workers" "turn on worker mode"

  hadoop_add_subcommand "balancer" daemon "run a cluster balancing utility"
  hadoop_add_subcommand "cacheadmin" admin "configure the HDFS cache"
  hadoop_add_subcommand "classpath" client "print the class path needed to get the hadoop jar and the required libraries"
  hadoop_add_subcommand "crypto" admin "configure HDFS encryption zones"
  hadoop_add_subcommand "datanode" daemon "run a DFS datanode"
  hadoop_add_subcommand "debug" admin "run a Debug Admin to execute HDFS debug commands"
  hadoop_add_subcommand "dfs" client "run a filesystem command on the file system"
  hadoop_add_subcommand "dfsadmin" admin "run a DFS admin client"
  hadoop_add_subcommand "dfsrouter" daemon "run the DFS router"
  hadoop_add_subcommand "dfsrouteradmin" admin "manage Router-based federation"
  hadoop_add_subcommand "diskbalancer" daemon "distribute data evenly among disks on a given node"
  hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
  hadoop_add_subcommand "ec" admin "run an HDFS ErasureCoding CLI"
  hadoop_add_subcommand "fetchdt" client "fetch a delegation token from the NameNode"
  hadoop_add_subcommand "fsck" admin "run a DFS filesystem checking utility"
  hadoop_add_subcommand "getconf" client "get config values from configuration"
  hadoop_add_subcommand "groups" client "get the groups which users belong to"
  hadoop_add_subcommand "haadmin" admin "run a DFS HA admin client"
  hadoop_add_subcommand "jmxget" admin "get JMX exported values from NameNode or DataNode"
  hadoop_add_subcommand "journalnode" daemon "run the DFS journalnode"
  hadoop_add_subcommand "lsSnapshottableDir" client "list all snapshottable dirs owned by the current user"
  hadoop_add_subcommand "mover" daemon "run a utility to move block replicas across storage types"
  hadoop_add_subcommand "namenode" daemon "run the DFS namenode"
  hadoop_add_subcommand "nfs3" daemon "run an NFS version 3 gateway"
  hadoop_add_subcommand "oev" admin "apply the offline edits viewer to an edits file"
  hadoop_add_subcommand "oiv" admin "apply the offline fsimage viewer to an fsimage"
  hadoop_add_subcommand "oiv_legacy" admin "apply the offline fsimage viewer to a legacy fsimage"
  hadoop_add_subcommand "portmap" daemon "run a portmap service"
  hadoop_add_subcommand "secondarynamenode" daemon "run the DFS secondary namenode"
  hadoop_add_subcommand "snapshotDiff" client "diff two snapshots of a directory or diff the current directory contents with a snapshot"
  hadoop_add_subcommand "storagepolicies" admin "list/get/set block storage policies"
  hadoop_add_subcommand "version" client "print the version"
  hadoop_add_subcommand "zkfc" daemon "run the ZK Failover Controller daemon"
  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" false
}

## @description  Default command handler for the hdfs command
## @audience     public
## @stability    stable
## @replaceable  no
## @param        CLI arguments
function hdfscmd_case
{
  subcmd=$1
  shift

  case ${subcmd} in
    balancer)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.balancer.Balancer
    ;;
    cacheadmin)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.CacheAdmin
    ;;
    classpath)
      hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
    ;;
    crypto)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.CryptoAdmin
    ;;
    datanode)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_SECURE_CLASSNAME="org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.datanode.DataNode'
      hadoop_deprecate_envvar HADOOP_SECURE_DN_PID_DIR HADOOP_SECURE_PID_DIR
      hadoop_deprecate_envvar HADOOP_SECURE_DN_LOG_DIR HADOOP_SECURE_LOG_DIR
    ;;
    debug)
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.tools.DebugAdmin'
    ;;
    dfs)
      HADOOP_CLASSNAME=org.apache.hadoop.fs.FsShell
    ;;
    dfsadmin)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DFSAdmin
    ;;
    dfsrouter)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.federation.router.DFSRouter'
    ;;
    dfsrouteradmin)
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.tools.federation.RouterAdmin'
    ;;
    diskbalancer)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DiskBalancerCLI
    ;;
    envvars)
      echo "JAVA_HOME='${JAVA_HOME}'"
      echo "HADOOP_HDFS_HOME='${HADOOP_HDFS_HOME}'"
      echo "HDFS_DIR='${HDFS_DIR}'"
      echo "HDFS_LIB_JARS_DIR='${HDFS_LIB_JARS_DIR}'"
      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
      exit 0
    ;;
    ec)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.ECAdmin
    ;;
    fetchdt)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
    ;;
    fsck)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DFSck
    ;;
    getconf)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.GetConf
    ;;
    groups)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.GetGroups
    ;;
    haadmin)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DFSHAAdmin
    ;;
    journalnode)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.qjournal.server.JournalNode'
    ;;
    jmxget)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.JMXGet
    ;;
    lsSnapshottableDir)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir
    ;;
    mover)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.mover.Mover
    ;;
    namenode)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.namenode.NameNode'
      hadoop_add_param HADOOP_OPTS hdfs.audit.logger "-Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER}"
    ;;
    nfs3)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_SECURE_CLASSNAME=org.apache.hadoop.hdfs.nfs.nfs3.PrivilegedNfsGatewayStarter
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.nfs.nfs3.Nfs3
      hadoop_deprecate_envvar HADOOP_SECURE_NFS3_LOG_DIR HADOOP_SECURE_LOG_DIR
      hadoop_deprecate_envvar HADOOP_SECURE_NFS3_PID_DIR HADOOP_SECURE_PID_DIR
    ;;
    oev)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer
    ;;
    oiv)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewerPB
    ;;
    oiv_legacy)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
    ;;
    portmap)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME=org.apache.hadoop.portmap.Portmap
    ;;
    secondarynamenode)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
      hadoop_add_param HADOOP_OPTS hdfs.audit.logger "-Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER}"
    ;;
    snapshotDiff)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff
    ;;
    storagepolicies)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.StoragePolicyAdmin
    ;;
    version)
      HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
    ;;
    zkfc)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.tools.DFSZKFailoverController'
    ;;
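    # any other token falls through here and is treated as a raw Java class
    # name to run on the Hadoop classpath, e.g. (hypothetical invocation):
    #   hdfs org.apache.hadoop.util.VersionInfo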
    *)
      HADOOP_CLASSNAME="${subcmd}"
      if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
        hadoop_exit_with_usage 1
      fi
    ;;
  esac
}

# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi

HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
HADOOP_NEW_CONFIG=true
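# hdfs-config.sh chains into the common Hadoop shell library
# (hadoop-config.sh / hadoop-functions.sh), which supplies the hadoop_*
# helper functions used throughout this script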
if [[ -f "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh" ]]; then
  # shellcheck source=./hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
  . "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh"
else
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hdfs-config.sh." >&2
  exit 1
fi

# now that we have support code, let's abs MYNAME so we can use it later
MYNAME=$(hadoop_abs "${MYNAME}")

if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi

HADOOP_SUBCMD=$1
shift
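
# when the subcommand is configured to run as a dedicated service account
# (e.g. HDFS_NAMENODE_USER), re-exec this script through su as that account;
# the decision logic lives in the common library's
# hadoop_need_reexec/hadoop_uservar_su, not here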
if hadoop_need_reexec hdfs "${HADOOP_SUBCMD}"; then
  hadoop_uservar_su hdfs "${HADOOP_SUBCMD}" \
    "${MYNAME}" \
    "--reexec" \
    "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi

hadoop_verify_user_perm "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

HADOOP_SUBCMD_ARGS=("$@")
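
# a user-defined function named hdfs_subcommand_<name> (declared in, say,
# hadoop-user-functions.sh) takes precedence over the built-in case table;
# a minimal hypothetical example:
#   function hdfs_subcommand_hello
#   {
#     HADOOP_CLASSNAME=my.example.Hello   # hypothetical class
#   }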
if declare -f hdfs_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
  hadoop_debug "Calling dynamically: hdfs_subcommand_${HADOOP_SUBCMD} ${HADOOP_SUBCMD_ARGS[*]}"
  "hdfs_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
else
  hdfscmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
fi

hadoop_add_client_opts

if [[ ${HADOOP_WORKER_MODE} = true ]]; then
  hadoop_common_worker_mode_execute "${HADOOP_HDFS_HOME}/bin/hdfs" "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi

hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

# everything is in globals at this point, so call the generic handler
hadoop_generic_java_subcmd_handler