# bin/hadoop — Apache Hadoop core command launcher script.
  1. #!/usr/bin/env bash
  2. # Licensed to the Apache Software Foundation (ASF) under one or more
  3. # contributor license agreements. See the NOTICE file distributed with
  4. # this work for additional information regarding copyright ownership.
  5. # The ASF licenses this file to You under the Apache License, Version 2.0
  6. # (the "License"); you may not use this file except in compliance with
  7. # the License. You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. # This script runs the hadoop core commands.
  17. bin=`which $0`
  18. bin=`dirname ${bin}`
  19. bin=`cd "$bin"; pwd`
  20. DEFAULT_LIBEXEC_DIR="$bin"/../libexec
  21. HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
  22. . $HADOOP_LIBEXEC_DIR/hadoop-config.sh
  23. function print_usage(){
  24. echo "Usage: hadoop [--config confdir] [COMMAND | CLASSNAME]"
  25. echo " CLASSNAME run the class named CLASSNAME"
  26. echo " or"
  27. echo " where COMMAND is one of:"
  28. echo " fs run a generic filesystem user client"
  29. echo " version print the version"
  30. echo " jar <jar> run a jar file"
  31. echo " note: please use \"yarn jar\" to launch"
  32. echo " YARN applications, not this command."
  33. echo " checknative [-a|-h] check native hadoop and compression libraries availability"
  34. echo " distcp <srcurl> <desturl> copy file or directories recursively"
  35. echo " archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
  36. echo " classpath prints the class path needed to get the"
  37. echo " Hadoop jar and the required libraries"
  38. echo " credential interact with credential providers"
  39. echo " daemonlog get/set the log level for each daemon"
  40. echo " trace view and modify Hadoop tracing settings"
  41. echo ""
  42. echo "Most commands print help when invoked w/o parameters."
  43. }
  44. if [ $# = 0 ]; then
  45. print_usage
  46. exit
  47. fi
  48. COMMAND=$1
  49. case $COMMAND in
  50. # usage flags
  51. --help|-help|-h)
  52. print_usage
  53. exit
  54. ;;
  55. #hdfs commands
  56. namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer|fetchdt|oiv|dfsgroups|portmap|nfs3)
  57. echo "DEPRECATED: Use of this script to execute hdfs command is deprecated." 1>&2
  58. echo "Instead use the hdfs command for it." 1>&2
  59. echo "" 1>&2
  60. #try to locate hdfs and if present, delegate to it.
  61. shift
  62. if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
  63. exec "${HADOOP_HDFS_HOME}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
  64. elif [ -f "${HADOOP_PREFIX}"/bin/hdfs ]; then
  65. exec "${HADOOP_PREFIX}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
  66. else
  67. echo "HADOOP_HDFS_HOME not found!"
  68. exit 1
  69. fi
  70. ;;
  71. #mapred commands for backwards compatibility
  72. pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
  73. echo "DEPRECATED: Use of this script to execute mapred command is deprecated." 1>&2
  74. echo "Instead use the mapred command for it." 1>&2
  75. echo "" 1>&2
  76. #try to locate mapred and if present, delegate to it.
  77. shift
  78. if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
  79. exec "${HADOOP_MAPRED_HOME}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
  80. elif [ -f "${HADOOP_PREFIX}"/bin/mapred ]; then
  81. exec "${HADOOP_PREFIX}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
  82. else
  83. echo "HADOOP_MAPRED_HOME not found!"
  84. exit 1
  85. fi
  86. ;;
  87. #core commands
  88. *)
  89. # the core commands
  90. if [ "$COMMAND" = "fs" ] ; then
  91. CLASS=org.apache.hadoop.fs.FsShell
  92. elif [ "$COMMAND" = "version" ] ; then
  93. CLASS=org.apache.hadoop.util.VersionInfo
  94. elif [ "$COMMAND" = "jar" ] ; then
  95. CLASS=org.apache.hadoop.util.RunJar
  96. if [[ -n "${YARN_OPTS}" ]] || [[ -n "${YARN_CLIENT_OPTS}" ]]; then
  97. echo "WARNING: Use \"yarn jar\" to launch YARN applications." 1>&2
  98. fi
  99. elif [ "$COMMAND" = "key" ] ; then
  100. CLASS=org.apache.hadoop.crypto.key.KeyShell
  101. elif [ "$COMMAND" = "checknative" ] ; then
  102. CLASS=org.apache.hadoop.util.NativeLibraryChecker
  103. elif [ "$COMMAND" = "distcp" ] ; then
  104. CLASS=org.apache.hadoop.tools.DistCp
  105. CLASSPATH=${CLASSPATH}:${TOOL_PATH}
  106. elif [ "$COMMAND" = "daemonlog" ] ; then
  107. CLASS=org.apache.hadoop.log.LogLevel
  108. elif [ "$COMMAND" = "archive" ] ; then
  109. CLASS=org.apache.hadoop.tools.HadoopArchives
  110. CLASSPATH=${CLASSPATH}:${TOOL_PATH}
  111. elif [ "$COMMAND" = "credential" ] ; then
  112. CLASS=org.apache.hadoop.security.alias.CredentialShell
  113. elif [ "$COMMAND" = "trace" ] ; then
  114. CLASS=org.apache.hadoop.tracing.TraceAdmin
  115. elif [ "$COMMAND" = "classpath" ] ; then
  116. if [ "$#" -gt 1 ]; then
  117. CLASS=org.apache.hadoop.util.Classpath
  118. else
  119. # No need to bother starting up a JVM for this simple case.
  120. if [ "$cygwin" = true ]; then
  121. CLASSPATH=$(cygpath -p -w "$CLASSPATH" 2>/dev/null)
  122. fi
  123. echo $CLASSPATH
  124. exit
  125. fi
  126. elif [[ "$COMMAND" = -* ]] ; then
  127. # class and package names cannot begin with a -
  128. echo "Error: No command named \`$COMMAND' was found. Perhaps you meant \`hadoop ${COMMAND#-}'"
  129. exit 1
  130. else
  131. CLASS=$COMMAND
  132. fi
  133. # cygwin path translation
  134. if [ "$cygwin" = true ]; then
  135. CLASSPATH=$(cygpath -p -w "$CLASSPATH" 2>/dev/null)
  136. HADOOP_LOG_DIR=$(cygpath -w "$HADOOP_LOG_DIR" 2>/dev/null)
  137. HADOOP_PREFIX=$(cygpath -w "$HADOOP_PREFIX" 2>/dev/null)
  138. HADOOP_CONF_DIR=$(cygpath -w "$HADOOP_CONF_DIR" 2>/dev/null)
  139. HADOOP_COMMON_HOME=$(cygpath -w "$HADOOP_COMMON_HOME" 2>/dev/null)
  140. HADOOP_HDFS_HOME=$(cygpath -w "$HADOOP_HDFS_HOME" 2>/dev/null)
  141. HADOOP_YARN_HOME=$(cygpath -w "$HADOOP_YARN_HOME" 2>/dev/null)
  142. HADOOP_MAPRED_HOME=$(cygpath -w "$HADOOP_MAPRED_HOME" 2>/dev/null)
  143. fi
  144. shift
  145. # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
  146. HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  147. #make sure security appender is turned off
  148. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
  149. export CLASSPATH=$CLASSPATH
  150. exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
  151. ;;
  152. esac