#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script runs the hadoop core commands.
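
# Illustrative invocations (not exhaustive; the paths, jar and class names
# below are examples only and assume an already-configured installation):
#
#   hadoop fs -ls /user/example                    # generic filesystem client
#   hadoop jar my-app.jar com.example.Main arg1    # run a jar file
#   hadoop classpath                               # print the client classpath
#   hadoop com.example.SomeTool                    # run an arbitrary CLASSNAME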

# Resolve the directory this script lives in so that libexec/ can be found
# relative to the installation, however the script was invoked.
bin=`which $0`
bin=`dirname ${bin}`
bin=`cd "$bin"; pwd`

DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}

# hadoop-config.sh sets up the shared environment used below
# (JAVA, JAVA_HEAP_MAX, CLASSPATH, HADOOP_OPTS, and related variables).
. $HADOOP_LIBEXEC_DIR/hadoop-config.sh

function print_usage(){
  echo "Usage: hadoop [--config confdir] COMMAND"
  echo "       where COMMAND is one of:"
  echo "  fs                   run a generic filesystem user client"
  echo "  version              print the version"
  echo "  jar <jar>            run a jar file"
  echo "  checknative [-a|-h]  check native hadoop and compression libraries availability"
  echo "  distcp <srcurl> <desturl> copy file or directories recursively"
  echo "  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
  echo "  classpath            prints the class path needed to get the"
  echo "                       Hadoop jar and the required libraries"
  echo "  credential           interact with credential providers"
  echo "  key                  manage keys via the KeyProvider"
  echo "  daemonlog            get/set the log level for each daemon"
  echo "  trace                view and modify Hadoop tracing settings"
  echo " or"
  echo "  CLASSNAME            run the class named CLASSNAME"
  echo ""
  echo "Most commands print help when invoked w/o parameters."
}

if [ $# = 0 ]; then
  print_usage
  exit
fi

COMMAND=$1
case $COMMAND in
  # usage flags
  --help|-help|-h)
    print_usage
    exit
    ;;

  #hdfs commands
  namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer|fetchdt|oiv|dfsgroups|portmap|nfs3)
    echo "DEPRECATED: Use of this script to execute hdfs command is deprecated." 1>&2
    echo "Instead use the hdfs command for it." 1>&2
    echo "" 1>&2
    #try to locate hdfs and if present, delegate to it.
    shift
    if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
      exec "${HADOOP_HDFS_HOME}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
    elif [ -f "${HADOOP_PREFIX}"/bin/hdfs ]; then
      exec "${HADOOP_PREFIX}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
    else
      echo "HADOOP_HDFS_HOME not found!"
      exit 1
    fi
    ;;

  #mapred commands for backwards compatibility
  pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
    echo "DEPRECATED: Use of this script to execute mapred command is deprecated." 1>&2
    echo "Instead use the mapred command for it." 1>&2
    echo "" 1>&2
    #try to locate mapred and if present, delegate to it.
    shift
    if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
      exec "${HADOOP_MAPRED_HOME}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
    elif [ -f "${HADOOP_PREFIX}"/bin/mapred ]; then
      exec "${HADOOP_PREFIX}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
    else
      echo "HADOOP_MAPRED_HOME not found!"
      exit 1
    fi
    ;;
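
  # Note: the ${COMMAND/dfsgroups/groups} and ${COMMAND/mrgroups/groups}
  # substitutions above map the legacy command names onto the new subcommands;
  # for example "hadoop dfsgroups" is forwarded as "hdfs groups" and
  # "hadoop mrgroups" as "mapred groups".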

  #core commands
  *)
    # the core commands
    if [ "$COMMAND" = "fs" ] ; then
      CLASS=org.apache.hadoop.fs.FsShell
    elif [ "$COMMAND" = "version" ] ; then
      CLASS=org.apache.hadoop.util.VersionInfo
    elif [ "$COMMAND" = "jar" ] ; then
      CLASS=org.apache.hadoop.util.RunJar
    elif [ "$COMMAND" = "key" ] ; then
      CLASS=org.apache.hadoop.crypto.key.KeyShell
    elif [ "$COMMAND" = "checknative" ] ; then
      CLASS=org.apache.hadoop.util.NativeLibraryChecker
    elif [ "$COMMAND" = "distcp" ] ; then
      CLASS=org.apache.hadoop.tools.DistCp
      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    elif [ "$COMMAND" = "daemonlog" ] ; then
      CLASS=org.apache.hadoop.log.LogLevel
    elif [ "$COMMAND" = "archive" ] ; then
      CLASS=org.apache.hadoop.tools.HadoopArchives
      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    elif [ "$COMMAND" = "credential" ] ; then
      CLASS=org.apache.hadoop.security.alias.CredentialShell
    elif [ "$COMMAND" = "trace" ] ; then
      CLASS=org.apache.hadoop.tracing.TraceAdmin
    elif [ "$COMMAND" = "classpath" ] ; then
      if [ "$#" -eq 1 ]; then
        # No need to bother starting up a JVM for this simple case.
        echo $CLASSPATH
        exit
      else
        CLASS=org.apache.hadoop.util.Classpath
      fi
    elif [[ "$COMMAND" = -* ]] ; then
      # class and package names cannot begin with a -
      echo "Error: No command named \`$COMMAND' was found. Perhaps you meant \`hadoop ${COMMAND#-}'"
      exit 1
    else
      CLASS=$COMMAND
    fi
    shift

    # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"

    #make sure security appender is turned off
    HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"

    export CLASSPATH=$CLASSPATH
    exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
    ;;
esac