#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script runs the hadoop core commands.
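
# Work out the directory this script resides in; it is only used below to
# derive the default libexec location.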
bin=`which $0`
bin=`dirname ${bin}`
bin=`cd "$bin"; pwd`

DEFAULT_LIBEXEC_DIR="$bin"/../libexec
if [ -n "$HADOOP_HOME" ]; then
  DEFAULT_LIBEXEC_DIR="$HADOOP_HOME"/libexec
fi
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/hadoop-config.sh
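# Note: hadoop-config.sh (together with hadoop-env.sh) is expected to define the
# variables referenced later in this script, e.g. JAVA, JAVA_HEAP_MAX, CLASSPATH
# and TOOL_PATH; nothing in this script sets them from scratch.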

function print_usage(){
  echo "Usage: hadoop [--config confdir] COMMAND"
  echo "       where COMMAND is one of:"
  echo "  fs                   run a generic filesystem user client"
  echo "  version              print the version"
  echo "  jar <jar>            run a jar file"
  echo "  checknative [-a|-h]  check native hadoop and compression libraries availability"
  echo "  distcp <srcurl> <desturl> copy file or directories recursively"
  echo "  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
  echo "  classpath            prints the class path needed to get the"
  echo "                       Hadoop jar and the required libraries"
  echo "  credential           interact with credential providers"
  echo "  key                  manage keys via the KeyProvider"
  echo "  daemonlog            get/set the log level for each daemon"
  echo "  trace                view and modify Hadoop tracing settings"
  echo " or"
  echo "  CLASSNAME            run the class named CLASSNAME"
  echo ""
  echo "Most commands print help when invoked w/o parameters."
}
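
# Illustrative examples of invoking this script (the jar name, class name and
# paths below are placeholders, not part of Hadoop itself):
#   hadoop fs -ls /user
#   hadoop version
#   hadoop jar /path/to/app.jar com.example.MyJob <args>
#   hadoop classpath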

if [ $# = 0 ]; then
  print_usage
  exit
fi

COMMAND=$1
case $COMMAND in

  # usage flags
  --help|-help|-h)
    print_usage
    exit
    ;;

  # hdfs commands
  namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer|fetchdt|oiv|dfsgroups|portmap|nfs3)
    echo "DEPRECATED: Use of this script to execute hdfs command is deprecated." 1>&2
    echo "Instead use the hdfs command for it." 1>&2
    echo "" 1>&2
    # try to locate hdfs and, if present, delegate to it.
    shift
    if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
      exec "${HADOOP_HDFS_HOME}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
    elif [ -f "${HADOOP_PREFIX}"/bin/hdfs ]; then
      exec "${HADOOP_PREFIX}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
    else
      echo "HADOOP_HDFS_HOME not found!"
      exit 1
    fi
    ;;

  # mapred commands for backwards compatibility
  pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
    echo "DEPRECATED: Use of this script to execute mapred command is deprecated." 1>&2
    echo "Instead use the mapred command for it." 1>&2
    echo "" 1>&2
    # try to locate mapred and, if present, delegate to it.
    shift
    if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
      exec "${HADOOP_MAPRED_HOME}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
    elif [ -f "${HADOOP_PREFIX}"/bin/mapred ]; then
      exec "${HADOOP_PREFIX}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
    else
      echo "HADOOP_MAPRED_HOME not found!"
      exit 1
    fi
    ;;

  # core commands
  *)
    if [ "$COMMAND" = "fs" ] ; then
      CLASS=org.apache.hadoop.fs.FsShell
    elif [ "$COMMAND" = "version" ] ; then
      CLASS=org.apache.hadoop.util.VersionInfo
    elif [ "$COMMAND" = "jar" ] ; then
      CLASS=org.apache.hadoop.util.RunJar
    elif [ "$COMMAND" = "key" ] ; then
      CLASS=org.apache.hadoop.crypto.key.KeyShell
    elif [ "$COMMAND" = "checknative" ] ; then
      CLASS=org.apache.hadoop.util.NativeLibraryChecker
    elif [ "$COMMAND" = "distcp" ] ; then
      CLASS=org.apache.hadoop.tools.DistCp
      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    elif [ "$COMMAND" = "daemonlog" ] ; then
      CLASS=org.apache.hadoop.log.LogLevel
    elif [ "$COMMAND" = "archive" ] ; then
      CLASS=org.apache.hadoop.tools.HadoopArchives
      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    elif [ "$COMMAND" = "credential" ] ; then
      CLASS=org.apache.hadoop.security.alias.CredentialShell
    elif [ "$COMMAND" = "trace" ] ; then
      CLASS=org.apache.hadoop.tracing.TraceAdmin
    elif [ "$COMMAND" = "classpath" ] ; then
      if [ "$#" -eq 1 ]; then
        # No need to bother starting up a JVM for this simple case.
        echo $CLASSPATH
        exit
      else
        CLASS=org.apache.hadoop.util.Classpath
      fi
    elif [[ "$COMMAND" = -* ]] ; then
      # class and package names cannot begin with a -
      echo "Error: No command named \`$COMMAND' was found. Perhaps you meant \`hadoop ${COMMAND#-}'"
      exit 1
    else
      CLASS=$COMMAND
    fi
    shift

    # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"

    # make sure security appender is turned off
    HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"

    export CLASSPATH=$CLASSPATH
    exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
    ;;
esac