#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script runs the hadoop core commands.
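
# Rough flow: resolve the libexec directory, source hadoop-config.sh to set up
# the environment, map COMMAND onto a Java class (or delegate deprecated
# hdfs/mapred commands to their own scripts), then exec the JVM.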

bin=$(which "$0")
bin=$(dirname "${bin}")
bin=$(cd "$bin"; pwd)

DEFAULT_LIBEXEC_DIR="$bin"/../libexec
if [ -n "$HADOOP_HOME" ]; then
  DEFAULT_LIBEXEC_DIR="$HADOOP_HOME"/libexec
fi

HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. "$HADOOP_LIBEXEC_DIR"/hadoop-config.sh
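
# hadoop-config.sh (sourced above) is expected to have set up the environment
# used below -- CLASSPATH, JAVA, JAVA_HEAP_MAX, TOOL_PATH, the cygwin flag and
# the HADOOP_* home variables; the exact set depends on the release.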

function print_usage(){
  echo "Usage: hadoop [--config confdir] [COMMAND | CLASSNAME]"
  echo "  CLASSNAME            run the class named CLASSNAME"
  echo " or"
  echo "  where COMMAND is one of:"
  echo "  fs                   run a generic filesystem user client"
  echo "  version              print the version"
  echo "  jar <jar>            run a jar file"
  echo "                       note: please use \"yarn jar\" to launch"
  echo "                             YARN applications, not this command."
  echo "  checknative [-a|-h]  check native hadoop and compression libraries availability"
  echo "  distcp <srcurl> <desturl> copy file or directories recursively"
  echo "  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
  echo "  classpath            prints the class path needed to get the"
  echo "                       Hadoop jar and the required libraries"
  echo "  credential           interact with credential providers"
  echo "  key                  manage keys via the KeyProvider"
  echo "  daemonlog            get/set the log level for each daemon"
  echo "  trace                view and modify Hadoop tracing settings"
  echo ""
  echo "Most commands print help when invoked w/o parameters."
}
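
# Illustrative invocations (the jar name and HDFS path below are examples only):
#   hadoop fs -ls /                          # generic filesystem client (FsShell)
#   hadoop version                           # print the version
#   hadoop classpath                         # echo the classpath; no JVM started
#   hadoop jar my-job.jar com.example.Main   # run a jar (use "yarn jar" for YARN apps)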

if [ $# = 0 ]; then
  print_usage
  exit
fi

COMMAND=$1
case $COMMAND in
  # usage flags
  --help|-help|-h)
    print_usage
    exit
    ;;

  #hdfs commands
  namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer|fetchdt|oiv|dfsgroups|portmap|nfs3)
    echo "DEPRECATED: Use of this script to execute hdfs commands is deprecated." 1>&2
    echo "Instead use the hdfs command for it." 1>&2
    echo "" 1>&2
    # try to locate hdfs and, if present, delegate to it
    shift
    if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
      exec "${HADOOP_HDFS_HOME}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
    elif [ -f "${HADOOP_PREFIX}"/bin/hdfs ]; then
      exec "${HADOOP_PREFIX}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
    else
      echo "HADOOP_HDFS_HOME not found!"
      exit 1
    fi
    ;;

  #mapred commands for backwards compatibility
  pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
    echo "DEPRECATED: Use of this script to execute mapred commands is deprecated." 1>&2
    echo "Instead use the mapred command for it." 1>&2
    echo "" 1>&2
    # try to locate mapred and, if present, delegate to it
    shift
    if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
      exec "${HADOOP_MAPRED_HOME}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
    elif [ -f "${HADOOP_PREFIX}"/bin/mapred ]; then
      exec "${HADOOP_PREFIX}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
    else
      echo "HADOOP_MAPRED_HOME not found!"
      exit 1
    fi
    ;;
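
  # For example, "hadoop fsck /" re-execs as "hdfs fsck /"; only dfsgroups and
  # mrgroups are renamed (to "hdfs groups" / "mapred groups") on the way through.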

  #core commands
  *)
    # the core commands
    if [ "$COMMAND" = "fs" ] ; then
      CLASS=org.apache.hadoop.fs.FsShell
    elif [ "$COMMAND" = "version" ] ; then
      CLASS=org.apache.hadoop.util.VersionInfo
    elif [ "$COMMAND" = "jar" ] ; then
      CLASS=org.apache.hadoop.util.RunJar
      if [[ -n "${YARN_OPTS}" ]] || [[ -n "${YARN_CLIENT_OPTS}" ]]; then
        echo "WARNING: Use \"yarn jar\" to launch YARN applications." 1>&2
      fi
    elif [ "$COMMAND" = "key" ] ; then
      CLASS=org.apache.hadoop.crypto.key.KeyShell
    elif [ "$COMMAND" = "checknative" ] ; then
      CLASS=org.apache.hadoop.util.NativeLibraryChecker
    elif [ "$COMMAND" = "distcp" ] ; then
      CLASS=org.apache.hadoop.tools.DistCp
      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    elif [ "$COMMAND" = "daemonlog" ] ; then
      CLASS=org.apache.hadoop.log.LogLevel
    elif [ "$COMMAND" = "archive" ] ; then
      CLASS=org.apache.hadoop.tools.HadoopArchives
      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    elif [ "$COMMAND" = "credential" ] ; then
      CLASS=org.apache.hadoop.security.alias.CredentialShell
    elif [ "$COMMAND" = "trace" ] ; then
      CLASS=org.apache.hadoop.tracing.TraceAdmin
    elif [ "$COMMAND" = "classpath" ] ; then
      if [ "$#" -gt 1 ]; then
        CLASS=org.apache.hadoop.util.Classpath
      else
        # No need to bother starting up a JVM for this simple case.
        if [ "$cygwin" = true ]; then
          CLASSPATH=$(cygpath -p -w "$CLASSPATH" 2>/dev/null)
        fi
        echo $CLASSPATH
        exit
      fi
    elif [[ "$COMMAND" = -* ]] ; then
      # class and package names cannot begin with a -
      echo "Error: No command named \`$COMMAND' was found. Perhaps you meant \`hadoop ${COMMAND#-}'"
      exit 1
    else
      CLASS=$COMMAND
    fi
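
    # At this point CLASS names the Java entry point for the command; for the
    # tool commands (distcp, archive) TOOL_PATH has also been appended to CLASSPATH.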

    # cygwin path translation
    if [ "$cygwin" = true ]; then
      CLASSPATH=$(cygpath -p -w "$CLASSPATH" 2>/dev/null)
      HADOOP_LOG_DIR=$(cygpath -w "$HADOOP_LOG_DIR" 2>/dev/null)
      HADOOP_PREFIX=$(cygpath -w "$HADOOP_PREFIX" 2>/dev/null)
      HADOOP_CONF_DIR=$(cygpath -w "$HADOOP_CONF_DIR" 2>/dev/null)
      HADOOP_COMMON_HOME=$(cygpath -w "$HADOOP_COMMON_HOME" 2>/dev/null)
      HADOOP_HDFS_HOME=$(cygpath -w "$HADOOP_HDFS_HOME" 2>/dev/null)
      HADOOP_YARN_HOME=$(cygpath -w "$HADOOP_YARN_HOME" 2>/dev/null)
      HADOOP_MAPRED_HOME=$(cygpath -w "$HADOOP_MAPRED_HOME" 2>/dev/null)
    fi
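
    # After the shift below, the remaining arguments go straight to the Java
    # class; e.g. "hadoop fs -ls /" ends up running roughly:
    #   "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS org.apache.hadoop.fs.FsShell -ls /
    # with the exact JVM options coming from hadoop-config.sh and
    # HADOOP_OPTS/HADOOP_CLIENT_OPTS.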

    shift

    # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"

    # make sure security appender is turned off
    HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"

    export CLASSPATH=$CLASSPATH
    exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
    ;;
esac