
Hadoop 3.0.0 Windows binaries, built from the release 3.0.0 tag and patched with the HADOOP-14877-001 patch to get the Windows build to work.

Steve Loughran committed 7 years ago
commit 84d87f10bd
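
To use these binaries on Windows, the usual pattern is to extract the tree, point HADOOP_HOME at it, and put its bin directory on PATH so the Java code can find winutils.exe and hadoop.dll. A minimal sketch, assuming the archive was extracted to C:\hadoop-3.0.0 (the path is an assumption, not part of this commit):

    set HADOOP_HOME=C:\hadoop-3.0.0
    set PATH=%PATH%;%HADOOP_HOME%\bin
    hadoop version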

+ 223 - 0
hadoop-3.0.0/bin/hadoop

@@ -0,0 +1,223 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
## @description  build up the hadoop command's usage text.
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_usage
{
  hadoop_add_option "buildpaths" "attempt to add class files from build tree"
  hadoop_add_option "hostnames list[,of,host,names]" "hosts to use in slave mode"
  hadoop_add_option "loglevel level" "set the log4j level for this command"
  hadoop_add_option "hosts filename" "list of hosts to use in slave mode"
  hadoop_add_option "workers" "turn on worker mode"
  hadoop_add_subcommand "checknative" client "check native Hadoop and compression libraries availability"
  hadoop_add_subcommand "classpath" client "prints the class path needed to get the Hadoop jar and the required libraries"
  hadoop_add_subcommand "conftest" client "validate configuration XML files"
  hadoop_add_subcommand "credential" client "interact with credential providers"
  hadoop_add_subcommand "daemonlog" admin "get/set the log level for each daemon"
  hadoop_add_subcommand "dtutil" client "operations related to delegation tokens"
  hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
  hadoop_add_subcommand "fs" client "run a generic filesystem user client"
  hadoop_add_subcommand "jar <jar>" client "run a jar file. NOTE: please use \"yarn jar\" to launch YARN applications, not this command."
  hadoop_add_subcommand "jnipath" client "prints the java.library.path"
  hadoop_add_subcommand "kerbname" client "show auth_to_local principal conversion"
  hadoop_add_subcommand "key" client "manage keys via the KeyProvider"
  hadoop_add_subcommand "trace" client "view and modify Hadoop tracing settings"
  hadoop_add_subcommand "version" client "print the version"
  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
## @description  Default command handler for hadoop command
## @audience     public
## @stability    stable
## @replaceable  no
## @param        CLI arguments
function hadoopcmd_case
{
  subcmd=$1
  shift
  case ${subcmd} in
    balancer|datanode|dfs|dfsadmin|dfsgroups|  \
    namenode|secondarynamenode|fsck|fetchdt|oiv| \
    portmap|nfs3)
      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
      subcmd=${subcmd/dfsgroups/groups}
      hadoop_error "WARNING: Attempting to execute replacement \"hdfs ${subcmd}\" instead."
      hadoop_error ""
      #try to locate hdfs and if present, delegate to it.
      if [[ -f "${HADOOP_HDFS_HOME}/bin/hdfs" ]]; then
        exec "${HADOOP_HDFS_HOME}/bin/hdfs" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}"  "$@"
      elif [[ -f "${HADOOP_HOME}/bin/hdfs" ]]; then
        exec "${HADOOP_HOME}/bin/hdfs" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      else
        hadoop_error "HADOOP_HDFS_HOME not found!"
        exit 1
      fi
    ;;
    #mapred commands for backwards compatibility
    pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
      subcmd=${subcmd/mrgroups/groups}
      hadoop_error "WARNING: Attempting to execute replacement \"mapred ${subcmd}\" instead."
      hadoop_error ""
      #try to locate mapred and if present, delegate to it.
      if [[ -f "${HADOOP_MAPRED_HOME}/bin/mapred" ]]; then
        exec "${HADOOP_MAPRED_HOME}/bin/mapred" \
        --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      elif [[ -f "${HADOOP_HOME}/bin/mapred" ]]; then
        exec "${HADOOP_HOME}/bin/mapred" \
        --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      else
        hadoop_error "HADOOP_MAPRED_HOME not found!"
        exit 1
      fi
    ;;
    checknative)
      HADOOP_CLASSNAME=org.apache.hadoop.util.NativeLibraryChecker
    ;;
    classpath)
      hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
    ;;
    conftest)
      HADOOP_CLASSNAME=org.apache.hadoop.util.ConfTest
    ;;
    credential)
      HADOOP_CLASSNAME=org.apache.hadoop.security.alias.CredentialShell
    ;;
    daemonlog)
      HADOOP_CLASSNAME=org.apache.hadoop.log.LogLevel
    ;;
    dtutil)
      HADOOP_CLASSNAME=org.apache.hadoop.security.token.DtUtilShell
    ;;
    envvars)
      echo "JAVA_HOME='${JAVA_HOME}'"
      echo "HADOOP_COMMON_HOME='${HADOOP_COMMON_HOME}'"
      echo "HADOOP_COMMON_DIR='${HADOOP_COMMON_DIR}'"
      echo "HADOOP_COMMON_LIB_JARS_DIR='${HADOOP_COMMON_LIB_JARS_DIR}'"
      echo "HADOOP_COMMON_LIB_NATIVE_DIR='${HADOOP_COMMON_LIB_NATIVE_DIR}'"
      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
      exit 0
    ;;
    fs)
      HADOOP_CLASSNAME=org.apache.hadoop.fs.FsShell
    ;;
    jar)
      if [[ -n "${YARN_OPTS}" ]] || [[ -n "${YARN_CLIENT_OPTS}" ]]; then
        hadoop_error "WARNING: Use \"yarn jar\" to launch YARN applications."
      fi
      HADOOP_CLASSNAME=org.apache.hadoop.util.RunJar
    ;;
    jnipath)
      hadoop_finalize
      echo "${JAVA_LIBRARY_PATH}"
      exit 0
    ;;
    kerbname)
      HADOOP_CLASSNAME=org.apache.hadoop.security.HadoopKerberosName
    ;;
    key)
      HADOOP_CLASSNAME=org.apache.hadoop.crypto.key.KeyShell
    ;;
    trace)
      HADOOP_CLASSNAME=org.apache.hadoop.tracing.TraceAdmin
    ;;
    version)
      HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
    ;;
    *)
      HADOOP_CLASSNAME="${subcmd}"
      if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
        hadoop_exit_with_usage 1
      fi
    ;;
  esac
}
# This script runs the hadoop core commands.
# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi
HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
HADOOP_NEW_CONFIG=true
if [[ -f "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
  # shellcheck source=./hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
else
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh." 2>&1
  exit 1
fi
# now that we have support code, let's abs MYNAME so we can use it later
MYNAME=$(hadoop_abs "${MYNAME}")
if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi
HADOOP_SUBCMD=$1
shift
if hadoop_need_reexec hadoop "${HADOOP_SUBCMD}"; then
  hadoop_uservar_su hadoop "${HADOOP_SUBCMD}" \
    "${MYNAME}" \
    "--reexec" \
    "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi
hadoop_verify_user_perm "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
HADOOP_SUBCMD_ARGS=("$@")
if declare -f hadoop_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
  hadoop_debug "Calling dynamically: hadoop_subcommand_${HADOOP_SUBCMD} ${HADOOP_SUBCMD_ARGS[*]}"
  "hadoop_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
else
  hadoopcmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
fi
hadoop_add_client_opts
if [[ ${HADOOP_WORKER_MODE} = true ]]; then
  hadoop_common_worker_mode_execute "${HADOOP_COMMON_HOME}/bin/hadoop" "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi
hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
# everything is in globals at this point, so call the generic handler
hadoop_generic_java_subcmd_handler
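
The deprecation shim in hadoopcmd_case means old-style invocations keep working by re-exec'ing the hdfs script. An illustrative pair of calls:

    hadoop fs -ls /            (client path: runs org.apache.hadoop.fs.FsShell)
    hadoop dfsadmin -report    (deprecated: warns, then exec's "hdfs dfsadmin -report")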

+ 318 - 0
hadoop-3.0.0/bin/hadoop.cmd

@@ -0,0 +1,318 @@
@echo off
@rem Licensed to the Apache Software Foundation (ASF) under one or more
@rem contributor license agreements.  See the NOTICE file distributed with
@rem this work for additional information regarding copyright ownership.
@rem The ASF licenses this file to You under the Apache License, Version 2.0
@rem (the "License"); you may not use this file except in compliance with
@rem the License.  You may obtain a copy of the License at
@rem
@rem     http://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem This script runs the hadoop core commands. 
@rem Environment Variables
@rem
@rem   JAVA_HOME        The java implementation to use. Required.
@rem
@rem   HADOOP_CLASSPATH Extra Java CLASSPATH entries.
@rem
@rem   HADOOP_USER_CLASSPATH_FIRST      When defined, the HADOOP_CLASSPATH is
@rem                                    added in the beginning of the global
@rem                                    classpath. Can be defined, for example,
@rem                                    by doing
@rem                                    export HADOOP_USER_CLASSPATH_FIRST=true
@rem
@rem   HADOOP_USE_CLIENT_CLASSLOADER    When defined, HADOOP_CLASSPATH and the
@rem                                    jar as the hadoop jar argument are
@rem                                    handled by a separate isolated client
@rem                                    classloader. If it is set,
@rem                                    HADOOP_USER_CLASSPATH_FIRST is
@rem                                    ignored. Can be defined by doing
@rem                                    export HADOOP_USE_CLIENT_CLASSLOADER=true
@rem
@rem   HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES
@rem                                    When defined, it overrides the default
@rem                                    definition of system classes for the
@rem                                    client classloader when
@rem                                    HADOOP_USE_CLIENT_CLASSLOADER is
@rem                                    enabled. Names ending in '.' (period)
@rem                                    are treated as package names, and names
@rem                                    starting with a '-' are treated as
@rem                                    negative matches. For example,
@rem                                    export HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES="-org.apache.hadoop.UserClass,java.,javax.,org.apache.hadoop."
@rem
@rem   HADOOP_HEAPSIZE  The maximum amount of heap to use, in MB.
@rem                    Default is 1000.
@rem
@rem   HADOOP_OPTS      Extra Java runtime options.
@rem
@rem   HADOOP_CLIENT_OPTS         Extra Java runtime options applied when a
@rem                              client-side command is run. Applies to more
@rem                              than one command (fs, dfs, fsck, dfsadmin etc).
@rem
@rem   HADOOP_{COMMAND}_OPTS      Extra Java runtime options for a specific
@rem                              command, e.g. HADOOP_DATANODE_OPTS applies
@rem                              to the datanode.
@rem
@rem   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
@rem
@rem   HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
@rem
if not defined HADOOP_BIN_PATH ( 
  set HADOOP_BIN_PATH=%~dp0
)
if "%HADOOP_BIN_PATH:~-1%" == "\" (
  set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
)
call :updatepath %HADOOP_BIN_PATH%
:main
  setlocal enabledelayedexpansion
  set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
  if not defined HADOOP_LIBEXEC_DIR (
    set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
  )
  call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*
  if "%1" == "--config" (
    shift
    shift
  )
  if "%1" == "--loglevel" (
    shift
    shift
  )
  set hadoop-command=%1
  if not defined hadoop-command (
      goto print_usage
  )
  call :make_command_arguments %*
  set hdfscommands=namenode secondarynamenode datanode dfs dfsadmin fsck balancer fetchdt oiv dfsgroups
  for %%i in ( %hdfscommands% ) do (
    if %hadoop-command% == %%i set hdfscommand=true
  )
  if defined hdfscommand (
    @echo DEPRECATED: Use of this script to execute hdfs command is deprecated. 1>&2
    @echo Instead use the hdfs command for it. 1>&2
    if exist %HADOOP_HDFS_HOME%\bin\hdfs.cmd (
      call %HADOOP_HDFS_HOME%\bin\hdfs.cmd %*
      goto :eof
    ) else if exist %HADOOP_HOME%\bin\hdfs.cmd (
      call %HADOOP_HOME%\bin\hdfs.cmd %*
      goto :eof
    ) else (
      echo HADOOP_HDFS_HOME not found!
      goto :eof
    )
  )
  set mapredcommands=pipes job queue mrgroups mradmin jobtracker tasktracker
  for %%i in ( %mapredcommands% ) do (
    if %hadoop-command% == %%i set mapredcommand=true  
  )
  if defined mapredcommand (
    @echo DEPRECATED: Use of this script to execute mapred command is deprecated. 1>&2
    @echo Instead use the mapred command for it. 1>&2
    if exist %HADOOP_MAPRED_HOME%\bin\mapred.cmd (
      call %HADOOP_MAPRED_HOME%\bin\mapred.cmd %*
      goto :eof
    ) else if exist %HADOOP_HOME%\bin\mapred.cmd (
      call %HADOOP_HOME%\bin\mapred.cmd %*
      goto :eof
    ) else (
      echo HADOOP_MAPRED_HOME not found!
      goto :eof
    )
  )
  if %hadoop-command% == classpath (
    if not defined hadoop-command-arguments (
      @rem No need to bother starting up a JVM for this simple case.
      @echo %CLASSPATH%
      exit /b
    )
  ) else if %hadoop-command% == jnipath (
    echo !PATH!
    exit /b
  )
  set corecommands=fs version jar checknative conftest distch distcp daemonlog archive classpath credential kerbname key trace
  for %%i in ( %corecommands% ) do (
    if %hadoop-command% == %%i set corecommand=true  
  )
  if defined corecommand (
    call :%hadoop-command%
  ) else (
    set CLASSPATH=%CLASSPATH%;%CD%
    set CLASS=%hadoop-command%
  )
  set path=%PATH%;%HADOOP_BIN_PATH%
  @rem Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  @rem make sure security appender is turned off
  if not defined HADOOP_SECURITY_LOGGER (
    set HADOOP_SECURITY_LOGGER=INFO,NullAppender
  )
  set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER%
  call %JAVA% %JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %hadoop-command-arguments%
  exit /b %ERRORLEVEL%
:fs 
  set CLASS=org.apache.hadoop.fs.FsShell
  goto :eof
:version 
  set CLASS=org.apache.hadoop.util.VersionInfo
  goto :eof
:jar
  if defined YARN_OPTS (
    @echo WARNING: Use "yarn jar" to launch YARN applications.
  ) else if defined YARN_CLIENT_OPTS (
    @echo WARNING: Use "yarn jar" to launch YARN applications.
  )
  set CLASS=org.apache.hadoop.util.RunJar
  goto :eof
:checknative
  set CLASS=org.apache.hadoop.util.NativeLibraryChecker
  goto :eof
:conftest
  set CLASS=org.apache.hadoop.util.ConfTest
  goto :eof
:distch
  set CLASS=org.apache.hadoop.tools.DistCh
  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
  goto :eof
:distcp
  set CLASS=org.apache.hadoop.tools.DistCp
  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
  goto :eof
:daemonlog
  set CLASS=org.apache.hadoop.log.LogLevel
  goto :eof
:archive
  set CLASS=org.apache.hadoop.tools.HadoopArchives
  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
  goto :eof
:classpath
  set CLASS=org.apache.hadoop.util.Classpath
  goto :eof
:credential
  set CLASS=org.apache.hadoop.security.alias.CredentialShell
  goto :eof
:kerbname
  set CLASS=org.apache.hadoop.security.HadoopKerberosName
  goto :eof
:key
  set CLASS=org.apache.hadoop.crypto.key.KeyShell
  goto :eof
:trace
  set CLASS=org.apache.hadoop.tracing.TraceAdmin
  goto :eof
:updatepath
  set path_to_add=%*
  set current_path_comparable=%path%
  set current_path_comparable=%current_path_comparable: =_%
  set current_path_comparable=%current_path_comparable:(=_%
  set current_path_comparable=%current_path_comparable:)=_%
  set path_to_add_comparable=%path_to_add%
  set path_to_add_comparable=%path_to_add_comparable: =_%
  set path_to_add_comparable=%path_to_add_comparable:(=_%
  set path_to_add_comparable=%path_to_add_comparable:)=_%
  for %%i in ( %current_path_comparable% ) do (
    if /i "%%i" == "%path_to_add_comparable%" (
      set path_to_add_exist=true
    )
  )
  set current_path_comparable=
  set path_to_add_comparable=
  if not defined path_to_add_exist path=%path_to_add%;%path%
  set path_to_add=
  goto :eof
@rem This changes %1, %2 etc. Hence those cannot be used after calling this.
:make_command_arguments
  if "%1" == "--config" (
    shift
    shift
  )
  if "%1" == "--loglevel" (
    shift
    shift
  )
  if [%2] == [] goto :eof
  shift
  set _arguments=
  :MakeCmdArgsLoop 
  if [%1]==[] goto :EndLoop 
  if not defined _arguments (
    set _arguments=%1
  ) else (
    set _arguments=!_arguments! %1
  )
  shift
  goto :MakeCmdArgsLoop 
  :EndLoop 
  set hadoop-command-arguments=%_arguments%
  goto :eof
:print_usage
  @echo Usage: hadoop [--config confdir] [--loglevel loglevel] COMMAND
  @echo where COMMAND is one of:
  @echo   fs                   run a generic filesystem user client
  @echo   version              print the version
  @echo   jar ^<jar^>            run a jar file
  @echo                        note: please use "yarn jar" to launch
  @echo                              YARN applications, not this command.
  @echo   checknative [-a^|-h]  check native hadoop and compression libraries availability
  @echo   conftest             validate configuration XML files
  @echo   distch path:owner:group:permission
  @echo                        distributed metadata changer
  @echo   distcp ^<srcurl^> ^<desturl^> copy file or directories recursively
  @echo   archive -archiveName NAME -p ^<parent path^> ^<src^>* ^<dest^> create a hadoop archive
  @echo   classpath            prints the class path needed to get the
  @echo                        Hadoop jar and the required libraries
  @echo   credential           interact with credential providers
  @echo   jnipath              prints the java.library.path
  @echo   kerbname             show auth_to_local principal conversion
  @echo   key                  manage keys via the KeyProvider
  @echo   trace                view and modify Hadoop tracing settings
  @echo   daemonlog            get/set the log level for each daemon
  @echo  or
  @echo   CLASSNAME            run the class named CLASSNAME
  @echo.
  @echo Most commands print help when invoked w/o parameters.
endlocal
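
As the :main block shows, "hadoop classpath" with no further arguments echoes %CLASSPATH% directly rather than starting a JVM; with arguments it falls through to org.apache.hadoop.util.Classpath. Illustrative:

    hadoop classpath           (printed straight from %CLASSPATH%, no JVM)
    hadoop classpath --glob    (handled by org.apache.hadoop.util.Classpath)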

BIN
hadoop-3.0.0/bin/hadoop.dll


BIN
hadoop-3.0.0/bin/hadoop.exp


BIN
hadoop-3.0.0/bin/hadoop.lib


BIN
hadoop-3.0.0/bin/hadoop.pdb


+ 272 - 0
hadoop-3.0.0/bin/hdfs

@@ -0,0 +1,272 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
## @description  build up the hdfs command's usage text.
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_usage
{
  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
  hadoop_add_option "--daemon (start|status|stop)" "operate on a daemon"
  hadoop_add_option "--hostnames list[,of,host,names]" "hosts to use in worker mode"
  hadoop_add_option "--loglevel level" "set the log4j level for this command"
  hadoop_add_option "--hosts filename" "list of hosts to use in worker mode"
  hadoop_add_option "--workers" "turn on worker mode"
  hadoop_add_subcommand "balancer" daemon "run a cluster balancing utility"
  hadoop_add_subcommand "cacheadmin" admin "configure the HDFS cache"
  hadoop_add_subcommand "classpath" client "prints the class path needed to get the hadoop jar and the required libraries"
  hadoop_add_subcommand "crypto" admin "configure HDFS encryption zones"
  hadoop_add_subcommand "datanode" daemon "run a DFS datanode"
  hadoop_add_subcommand "debug" admin "run a Debug Admin to execute HDFS debug commands"
  hadoop_add_subcommand "dfs" client "run a filesystem command on the file system"
  hadoop_add_subcommand "dfsadmin" admin "run a DFS admin client"
  hadoop_add_subcommand "dfsrouter" daemon "run the DFS router"
  hadoop_add_subcommand "dfsrouteradmin" admin "manage Router-based federation"
  hadoop_add_subcommand "diskbalancer" daemon "Distributes data evenly among disks on a given node"
  hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
  hadoop_add_subcommand "ec" admin "run a HDFS ErasureCoding CLI"
  hadoop_add_subcommand "fetchdt" client "fetch a delegation token from the NameNode"
  hadoop_add_subcommand "fsck" admin "run a DFS filesystem checking utility"
  hadoop_add_subcommand "getconf" client "get config values from configuration"
  hadoop_add_subcommand "groups" client "get the groups which users belong to"
  hadoop_add_subcommand "haadmin" admin "run a DFS HA admin client"
  hadoop_add_subcommand "jmxget" admin "get JMX exported values from NameNode or DataNode."
  hadoop_add_subcommand "journalnode" daemon "run the DFS journalnode"
  hadoop_add_subcommand "lsSnapshottableDir" client "list all snapshottable dirs owned by the current user"
  hadoop_add_subcommand "mover" daemon "run a utility to move block replicas across storage types"
  hadoop_add_subcommand "namenode" daemon "run the DFS namenode"
  hadoop_add_subcommand "nfs3" daemon "run an NFS version 3 gateway"
  hadoop_add_subcommand "oev" admin "apply the offline edits viewer to an edits file"
  hadoop_add_subcommand "oiv" admin "apply the offline fsimage viewer to an fsimage"
  hadoop_add_subcommand "oiv_legacy" admin "apply the offline fsimage viewer to a legacy fsimage"
  hadoop_add_subcommand "portmap" daemon "run a portmap service"
  hadoop_add_subcommand "secondarynamenode" daemon "run the DFS secondary namenode"
  hadoop_add_subcommand "snapshotDiff" client "diff two snapshots of a directory or diff the current directory contents with a snapshot"
  hadoop_add_subcommand "storagepolicies" admin "list/get/set block storage policies"
  hadoop_add_subcommand "version" client "print the version"
  hadoop_add_subcommand "zkfc" daemon "run the ZK Failover Controller daemon"
  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" false
}
## @description  Default command handler for hdfs command
## @audience     public
## @stability    stable
## @replaceable  no
## @param        CLI arguments
function hdfscmd_case
{
  subcmd=$1
  shift
  case ${subcmd} in
    balancer)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.balancer.Balancer
    ;;
    cacheadmin)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.CacheAdmin
    ;;
    classpath)
      hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
    ;;
    crypto)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.CryptoAdmin
    ;;
    datanode)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_SECURE_CLASSNAME="org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.datanode.DataNode'
      hadoop_deprecate_envvar HADOOP_SECURE_DN_PID_DIR HADOOP_SECURE_PID_DIR
      hadoop_deprecate_envvar HADOOP_SECURE_DN_LOG_DIR HADOOP_SECURE_LOG_DIR
    ;;
    debug)
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.tools.DebugAdmin'
    ;;
    dfs)
      HADOOP_CLASSNAME=org.apache.hadoop.fs.FsShell
    ;;
    dfsadmin)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DFSAdmin
    ;;
    dfsrouter)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.federation.router.DFSRouter'
    ;;
    dfsrouteradmin)
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.tools.federation.RouterAdmin'
    ;;
    diskbalancer)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DiskBalancerCLI
    ;;
    envvars)
      echo "JAVA_HOME='${JAVA_HOME}'"
      echo "HADOOP_HDFS_HOME='${HADOOP_HDFS_HOME}'"
      echo "HDFS_DIR='${HDFS_DIR}'"
      echo "HDFS_LIB_JARS_DIR='${HDFS_LIB_JARS_DIR}'"
      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
      exit 0
    ;;
    ec)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.ECAdmin
    ;;
    fetchdt)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
    ;;
    fsck)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DFSck
    ;;
    getconf)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.GetConf
    ;;
    groups)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.GetGroups
    ;;
    haadmin)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DFSHAAdmin
    ;;
    journalnode)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.qjournal.server.JournalNode'
    ;;
    jmxget)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.JMXGet
    ;;
    lsSnapshottableDir)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir
    ;;
    mover)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.mover.Mover
    ;;
    namenode)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.namenode.NameNode'
      hadoop_add_param HADOOP_OPTS hdfs.audit.logger "-Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER}"
    ;;
    nfs3)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_SECURE_CLASSNAME=org.apache.hadoop.hdfs.nfs.nfs3.PrivilegedNfsGatewayStarter
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.nfs.nfs3.Nfs3
      hadoop_deprecate_envvar HADOOP_SECURE_NFS3_LOG_DIR HADOOP_SECURE_LOG_DIR
      hadoop_deprecate_envvar HADOOP_SECURE_NFS3_PID_DIR HADOOP_SECURE_PID_DIR
    ;;
    oev)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer
    ;;
    oiv)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewerPB
    ;;
    oiv_legacy)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
    ;;
    portmap)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME=org.apache.hadoop.portmap.Portmap
    ;;
    secondarynamenode)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
      hadoop_add_param HADOOP_OPTS hdfs.audit.logger "-Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER}"
    ;;
    snapshotDiff)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff
    ;;
    storagepolicies)
      HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.StoragePolicyAdmin
    ;;
    version)
      HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
    ;;
    zkfc)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.tools.DFSZKFailoverController'
    ;;
    *)
      HADOOP_CLASSNAME="${subcmd}"
      if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
        hadoop_exit_with_usage 1
      fi
    ;;
  esac
}
# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi
HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
HADOOP_NEW_CONFIG=true
if [[ -f "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh" ]]; then
  # shellcheck source=./hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
  . "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh"
else
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hdfs-config.sh." 2>&1
  exit 1
fi
# now that we have support code, let's abs MYNAME so we can use it later
MYNAME=$(hadoop_abs "${MYNAME}")
if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi
HADOOP_SUBCMD=$1
shift
if hadoop_need_reexec hdfs "${HADOOP_SUBCMD}"; then
  hadoop_uservar_su hdfs "${HADOOP_SUBCMD}" \
    "${MYNAME}" \
    "--reexec" \
    "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi
hadoop_verify_user_perm "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
HADOOP_SUBCMD_ARGS=("$@")
if declare -f hdfs_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
  hadoop_debug "Calling dynamically: hdfs_subcommand_${HADOOP_SUBCMD} ${HADOOP_SUBCMD_ARGS[*]}"
  "hdfs_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
else
  hdfscmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
fi
hadoop_add_client_opts
if [[ ${HADOOP_WORKER_MODE} = true ]]; then
  hadoop_common_worker_mode_execute "${HADOOP_HDFS_HOME}/bin/hdfs" "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi
hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
# everything is in globals at this point, so call the generic handler
hadoop_generic_java_subcmd_handler
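
Every subcommand that sets HADOOP_SUBCMD_SUPPORTDAEMONIZATION can be managed through the --daemon option declared in hadoop_usage. An illustrative namenode lifecycle:

    hdfs --daemon start namenode
    hdfs --daemon status namenode
    hdfs --daemon stop namenode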

+ 258 - 0
hadoop-3.0.0/bin/hdfs.cmd

@@ -0,0 +1,258 @@
@echo off
@rem Licensed to the Apache Software Foundation (ASF) under one or more
@rem contributor license agreements.  See the NOTICE file distributed with
@rem this work for additional information regarding copyright ownership.
@rem The ASF licenses this file to You under the Apache License, Version 2.0
@rem (the "License"); you may not use this file except in compliance with
@rem the License.  You may obtain a copy of the License at
@rem
@rem     http://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
setlocal enabledelayedexpansion
if not defined HADOOP_BIN_PATH ( 
  set HADOOP_BIN_PATH=%~dp0
)
if "%HADOOP_BIN_PATH:~-1%" == "\" (
  set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
)
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
if not defined HADOOP_LIBEXEC_DIR (
  set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
)
call %HADOOP_LIBEXEC_DIR%\hdfs-config.cmd %*
if "%1" == "--config" (
  shift
  shift
)
if "%1" == "--loglevel" (
  shift
  shift
)
:main
  if exist %HADOOP_CONF_DIR%\hadoop-env.cmd (
    call %HADOOP_CONF_DIR%\hadoop-env.cmd
  )
  set hdfs-command=%1
  call :make_command_arguments %*
  if not defined hdfs-command (
      goto print_usage
  )
  if %hdfs-command% == classpath (
    if not defined hdfs-command-arguments (
      @rem No need to bother starting up a JVM for this simple case.
      @echo %CLASSPATH%
      exit /b
    )
  )
  set hdfscommands=dfs namenode secondarynamenode journalnode zkfc datanode dfsadmin haadmin fsck balancer jmxget oiv oev fetchdt getconf groups snapshotDiff lsSnapshottableDir cacheadmin mover storagepolicies classpath crypto dfsrouter dfsrouteradmin debug
  for %%i in ( %hdfscommands% ) do (
    if %hdfs-command% == %%i set hdfscommand=true
  )
  if defined hdfscommand (
    call :%hdfs-command%
  ) else (
    set CLASSPATH=%CLASSPATH%;%CD%
    set CLASS=%hdfs-command%
  )
  set java_arguments=%JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %hdfs-command-arguments%
  call %JAVA% %java_arguments%
goto :eof
:namenode
  set CLASS=org.apache.hadoop.hdfs.server.namenode.NameNode
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_NAMENODE_OPTS%
  goto :eof
:journalnode
  set CLASS=org.apache.hadoop.hdfs.qjournal.server.JournalNode
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_JOURNALNODE_OPTS%
  goto :eof
:zkfc
  set CLASS=org.apache.hadoop.hdfs.tools.DFSZKFailoverController
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_ZKFC_OPTS%
  goto :eof
:secondarynamenode
  set CLASS=org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_SECONDARYNAMENODE_OPTS%
  goto :eof
:datanode
  set CLASS=org.apache.hadoop.hdfs.server.datanode.DataNode
  set HADOOP_OPTS=%HADOOP_OPTS% -server %HADOOP_DATANODE_OPTS%
  goto :eof
:dfs
  set CLASS=org.apache.hadoop.fs.FsShell
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:dfsadmin
  set CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:haadmin
  set CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:fsck
  set CLASS=org.apache.hadoop.hdfs.tools.DFSck
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:balancer
  set CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_BALANCER_OPTS%
  goto :eof
:jmxget
  set CLASS=org.apache.hadoop.hdfs.tools.JMXGet
  goto :eof
:classpath
  set CLASS=org.apache.hadoop.util.Classpath
  goto :eof
:oiv
  set CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewerPB
  goto :eof
:oev
  set CLASS=org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer
  goto :eof
:fetchdt
  set CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
  goto :eof
:getconf
  set CLASS=org.apache.hadoop.hdfs.tools.GetConf
  goto :eof
:groups
  set CLASS=org.apache.hadoop.hdfs.tools.GetGroups
  goto :eof
:snapshotDiff
  set CLASS=org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff
  goto :eof
:lsSnapshottableDir
  set CLASS=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir
  goto :eof
:cacheadmin
  set CLASS=org.apache.hadoop.hdfs.tools.CacheAdmin
  goto :eof
:mover
  set CLASS=org.apache.hadoop.hdfs.server.mover.Mover
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_MOVER_OPTS%
  goto :eof
:storagepolicies
  set CLASS=org.apache.hadoop.hdfs.tools.StoragePolicyAdmin
  goto :eof
:crypto
  set CLASS=org.apache.hadoop.hdfs.tools.CryptoAdmin
  goto :eof
:dfsrouter
  set CLASS=org.apache.hadoop.hdfs.server.federation.router.DFSRouter
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_ROUTER_OPTS%
  goto :eof
:dfsrouteradmin
  set CLASS=org.apache.hadoop.hdfs.tools.federation.RouterAdmin
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_ROUTER_OPTS%
  goto :eof
:debug
  set CLASS=org.apache.hadoop.hdfs.tools.DebugAdmin
  goto :eof
@rem This changes %1, %2 etc. Hence those cannot be used after calling this.
:make_command_arguments
  if "%1" == "--config" (
    shift
    shift
  )
  if "%1" == "--loglevel" (
    shift
    shift
  )
  if [%2] == [] goto :eof
  shift
  set _hdfsarguments=
  :MakeCmdArgsLoop 
  if [%1]==[] goto :EndLoop 
  if not defined _hdfsarguments (
    set _hdfsarguments=%1
  ) else (
    set _hdfsarguments=!_hdfsarguments! %1
  )
  shift
  goto :MakeCmdArgsLoop 
  :EndLoop 
  set hdfs-command-arguments=%_hdfsarguments%
  goto :eof
:print_usage
  @echo Usage: hdfs [--config confdir] [--loglevel loglevel] COMMAND
  @echo        where COMMAND is one of:
  @echo   dfs                  run a filesystem command on the file systems supported in Hadoop.
  @echo   namenode -format     format the DFS filesystem
  @echo   secondarynamenode    run the DFS secondary namenode
  @echo   namenode             run the DFS namenode
  @echo   journalnode          run the DFS journalnode
  @echo   dfsrouter            run the DFS router
  @echo   dfsrouteradmin       manage Router-based federation
  @echo   zkfc                 run the ZK Failover Controller daemon
  @echo   datanode             run a DFS datanode
  @echo   dfsadmin             run a DFS admin client
  @echo   haadmin              run a DFS HA admin client
  @echo   fsck                 run a DFS filesystem checking utility
  @echo   balancer             run a cluster balancing utility
  @echo   jmxget               get JMX exported values from NameNode or DataNode.
  @echo   oiv                  apply the offline fsimage viewer to an fsimage
  @echo   oev                  apply the offline edits viewer to an edits file
  @echo   fetchdt              fetch a delegation token from the NameNode
  @echo   getconf              get config values from configuration
  @echo   groups               get the groups which users belong to
  @echo   snapshotDiff         diff two snapshots of a directory or diff the
  @echo                        current directory contents with a snapshot
  @echo   lsSnapshottableDir   list all snapshottable dirs owned by the current user
  @echo                        Use -help to see options
  @echo   cacheadmin           configure the HDFS cache
  @echo   crypto               configure HDFS encryption zones
  @echo   mover                run a utility to move block replicas across storage types
  @echo   storagepolicies      list/get/set block storage policies
  @echo.
  @echo Most commands print help when invoked w/o parameters.
@rem There are also debug commands, but they don't show up in this listing.
endlocal
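
Unlike the bash script, hdfs.cmd has no daemon mode: each label simply sets CLASS plus the matching *_OPTS variable and the JVM runs in the foreground. A typical first run on Windows (illustrative only):

    hdfs namenode -format
    hdfs namenode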

BIN
hadoop-3.0.0/bin/hdfs.dll


BIN
hadoop-3.0.0/bin/hdfs.exp


BIN
hadoop-3.0.0/bin/hdfs.lib


BIN
hadoop-3.0.0/bin/hdfs.pdb


BIN
hadoop-3.0.0/bin/libwinutils.lib


+ 165 - 0
hadoop-3.0.0/bin/mapred

@@ -0,0 +1,165 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
## @description  build up the mapred command's usage text.
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_usage
{
  hadoop_add_subcommand "classpath" client "prints the class path needed for running mapreduce subcommands"
  hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
  hadoop_add_subcommand "historyserver" daemon "run job history servers as a standalone daemon"
  hadoop_add_subcommand "hsadmin" admin "job history server admin interface"
  hadoop_add_subcommand "job" client "manipulate MapReduce jobs"
  hadoop_add_subcommand "pipes" client "run a Pipes job"
  hadoop_add_subcommand "queue" client "get information regarding JobQueues"
  hadoop_add_subcommand "sampler" client "sampler"
  hadoop_add_subcommand "version" client "print the version"
  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
## @description  Default command handler for mapred command
## @audience     public
## @stability    stable
## @replaceable  no
## @param        CLI arguments
function mapredcmd_case
{
  subcmd=$1
  shift
  case ${subcmd} in
    mradmin|jobtracker|tasktracker|groups)
      hadoop_error "Sorry, the ${subcmd} command is no longer supported."
      hadoop_error "You may find similar functionality with the \"yarn\" shell command."
      hadoop_exit_with_usage 1
    ;;
    classpath)
      hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
    ;;
    envvars)
      echo "JAVA_HOME='${JAVA_HOME}'"
      echo "HADOOP_MAPRED_HOME='${HADOOP_MAPRED_HOME}'"
      echo "MAPRED_DIR='${MAPRED_DIR}'"
      echo "MAPRED_LIB_JARS_DIR='${MAPRED_LIB_JARS_DIR}'"
      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
      exit 0
    ;;
    historyserver)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
      if [[ -n "${HADOOP_JOB_HISTORYSERVER_HEAPSIZE}" ]]; then
        HADOOP_HEAPSIZE_MAX="${HADOOP_JOB_HISTORYSERVER_HEAPSIZE}"
      fi
      HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_JHS_LOGGER:-$HADOOP_DAEMON_ROOT_LOGGER}
      if [[  "${HADOOP_DAEMON_MODE}" != "default" ]]; then
        hadoop_add_param HADOOP_OPTS mapred.jobsummary.logger "-Dmapred.jobsummary.logger=${HADOOP_DAEMON_ROOT_LOGGER}"
      fi
    ;;
    hsadmin)
      HADOOP_CLASSNAME=org.apache.hadoop.mapreduce.v2.hs.client.HSAdmin
    ;;
    job)
      HADOOP_CLASSNAME=org.apache.hadoop.mapred.JobClient
    ;;
    pipes)
      HADOOP_CLASSNAME=org.apache.hadoop.mapred.pipes.Submitter
    ;;
    queue)
      HADOOP_CLASSNAME=org.apache.hadoop.mapred.JobQueueClient
    ;;
    sampler)
      HADOOP_CLASSNAME=org.apache.hadoop.mapred.lib.InputSampler
    ;;
    version)
      HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
    ;;
    *)
      HADOOP_CLASSNAME="${subcmd}"
      if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
        hadoop_exit_with_usage 1
      fi
    ;;
  esac
}
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi
HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
HADOOP_NEW_CONFIG=true
if [[ -f "${HADOOP_LIBEXEC_DIR}/mapred-config.sh" ]]; then
  # shellcheck source=./hadoop-mapreduce-project/bin/mapred-config.sh
  . "${HADOOP_LIBEXEC_DIR}/mapred-config.sh"
else
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/mapred-config.sh." 2>&1
  exit 1
fi
# now that we have support code, let's abs MYNAME so we can use it later
MYNAME=$(hadoop_abs "${MYNAME}")
if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi
HADOOP_SUBCMD=$1
shift
if hadoop_need_reexec mapred "${HADOOP_SUBCMD}"; then
  hadoop_uservar_su mapred "${HADOOP_SUBCMD}" \
    "${MYNAME}" \
    "--reexec" \
    "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi
hadoop_verify_user_perm "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
HADOOP_SUBCMD_ARGS=("$@")
if declare -f mapred_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
  hadoop_debug "Calling dynamically: mapred_subcommand_${HADOOP_SUBCMD} ${HADOOP_SUBCMD_ARGS[*]}"
  "mapred_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
else
  mapredcmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
fi
hadoop_add_client_opts
if [[ ${HADOOP_WORKER_MODE} = true ]]; then
  hadoop_common_worker_mode_execute "${HADOOP_MAPRED_HOME}/bin/mapred" "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi
hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
# everything is in globals at this point, so call the generic handler
hadoop_generic_java_subcmd_handler
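
The historyserver arm supports daemonization and copies HADOOP_JOB_HISTORYSERVER_HEAPSIZE into HADOOP_HEAPSIZE_MAX, so a sketch of starting it with a 2 GB heap (the value is only an example):

    HADOOP_JOB_HISTORYSERVER_HEAPSIZE=2048 mapred --daemon start historyserver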

+ 217 - 0
hadoop-3.0.0/bin/mapred.cmd

@@ -0,0 +1,217 @@
@echo off
@rem Licensed to the Apache Software Foundation (ASF) under one or more
@rem contributor license agreements.  See the NOTICE file distributed with
@rem this work for additional information regarding copyright ownership.
@rem The ASF licenses this file to You under the Apache License, Version 2.0
@rem (the "License"); you may not use this file except in compliance with
@rem the License.  You may obtain a copy of the License at
@rem
@rem     http://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem The Hadoop mapred command script
setlocal enabledelayedexpansion
if not defined HADOOP_BIN_PATH ( 
  set HADOOP_BIN_PATH=%~dp0
)
if "%HADOOP_BIN_PATH:~`%" == "\" (
  set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
)
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
if not defined HADOOP_LIBEXEC_DIR (
  set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
)
call %HADOOP_LIBEXEC_DIR%\mapred-config.cmd %*
if "%1" == "--config" (
  shift
  shift
)
if "%1" == "--loglevel" (
  shift
  shift
)
:main
  if exist %MAPRED_CONF_DIR%\mapred-env.cmd (
    call %MAPRED_CONF_DIR%\mapred-env.cmd
  )
  set mapred-command=%1
  call :make_command_arguments %*
  if not defined mapred-command (
    goto print_usage
  )
  @rem JAVA and JAVA_HEAP_MAX are set in hadoop-config.cmd
  if defined MAPRED_HEAPSIZE (
    @rem echo run with Java heapsize %MAPRED_HEAPSIZE%
    set JAVA_HEAP_MAX=-Xmx%MAPRED_HEAPSIZE%m
  )
  )
  @rem CLASSPATH initially contains HADOOP_CONF_DIR and MAPRED_CONF_DIR
  if not defined HADOOP_CONF_DIR (
    echo NO HADOOP_CONF_DIR set.
    echo Please specify it either in mapred-env.cmd or in the environment.
    goto :eof
  )
  set CLASSPATH=%HADOOP_CONF_DIR%;%MAPRED_CONF_DIR%;%CLASSPATH%
  @rem for developers, add Hadoop classes to CLASSPATH
  if exist %HADOOP_MAPRED_HOME%\build\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\build\classes
  )
  if exist %HADOOP_MAPRED_HOME%\build\webapps (
    set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\build
  )
  if exist %HADOOP_MAPRED_HOME%\build\test\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\build\test\classes
  )
  if exist %HADOOP_MAPRED_HOME%\build\tools (
    set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\build\tools
  )
  @rem Need YARN jars also
  set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\*
  @rem add libs to CLASSPATH
  set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\%MAPRED_LIB_JARS_DIR%\*
  @rem add modules to CLASSPATH
  set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\modules\*
  if %mapred-command% == classpath (
    if not defined mapred-command-arguments (
      @rem No need to bother starting up a JVM for this simple case.
      @echo %CLASSPATH%
      exit /b
    )
  )
  call :%mapred-command% %mapred-command-arguments%
  set java_arguments=%JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %mapred-command-arguments%
  call %JAVA% %java_arguments%
goto :eof
:classpath
  set CLASS=org.apache.hadoop.util.Classpath
  goto :eof
:job
  set CLASS=org.apache.hadoop.mapred.JobClient
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:queue
  set CLASS=org.apache.hadoop.mapred.JobQueueClient
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:sampler
  set CLASS=org.apache.hadoop.mapred.lib.InputSampler
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:historyserver
  set CLASS=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
  set HADOOP_OPTS=%HADOOP_OPTS% -Dmapred.jobsummary.logger=%HADOOP_JHS_LOGGER% %HADOOP_JOB_HISTORYSERVER_OPTS%
  if defined HADOOP_JOB_HISTORYSERVER_HEAPSIZE (
    set JAVA_HEAP_MAX=-Xmx%HADOOP_JOB_HISTORYSERVER_HEAPSIZE%m
  )
  goto :eof
:distcp
  set CLASS=org.apache.hadoop.tools.DistCp
  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:archive
  set CLASS=org.apache.hadoop.tools.HadoopArchives
  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:hsadmin
  set CLASS=org.apache.hadoop.mapreduce.v2.hs.client.HSAdmin
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof
:pipes
  goto not_supported
:mradmin
  goto not_supported
:jobtracker
  goto not_supported
:tasktracker
  goto not_supported
:groups
  goto not_supported
@rem This changes %1, %2 etc. Hence those cannot be used after calling this.
:make_command_arguments
  if "%1" == "--config" (
    shift
    shift
  )
  if "%1" == "--loglevel" (
    shift
    shift
  )
  if [%2] == [] goto :eof
  shift
  set _mapredarguments=
  :MakeCmdArgsLoop 
  if [%1]==[] goto :EndLoop 
  if not defined _mapredarguments (
    set _mapredarguments=%1
  ) else (
    set _mapredarguments=!_mapredarguments! %1
  )
  shift
  goto :MakeCmdArgsLoop 
  :EndLoop 
  set mapred-command-arguments=%_mapredarguments%
  goto :eof
:not_supported
  @echo Sorry, the %mapred-command% command is no longer supported.
  @echo You may find similar functionality with the "yarn" shell command.
  goto print_usage
:print_usage
  @echo Usage: mapred [--config confdir] [--loglevel loglevel] COMMAND
  @echo        where COMMAND is one of:
  @echo   job                  manipulate MapReduce jobs
  @echo   queue                get information regarding JobQueues
  @echo   classpath            prints the class path needed for running
  @echo                        mapreduce subcommands
  @echo   historyserver        run job history servers as a standalone daemon
  @echo   distcp ^<srcurl^> ^<desturl^> copy file or directories recursively
  @echo   archive -archiveName NAME -p ^<parent path^> ^<src^>* ^<dest^> create a hadoop archive
  @echo   hsadmin              job history server admin interface
  @echo.
  @echo Most commands print help when invoked w/o parameters.
endlocal
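
Per the :main block, setting MAPRED_HEAPSIZE before invoking the script turns into a -Xmx flag for the JVM. Illustrative:

    set MAPRED_HEAPSIZE=1024
    mapred job -list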

BIN
hadoop-3.0.0/bin/winutils.exe


BIN
hadoop-3.0.0/bin/winutils.pdb


+ 278 - 0
hadoop-3.0.0/bin/yarn

@@ -0,0 +1,278 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
## @description  build up the yarn command's usage text.
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_usage
{
  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
  hadoop_add_option "--daemon (start|status|stop)" "operate on a daemon"
  hadoop_add_option "--hostnames list[,of,host,names]" "hosts to use in worker mode"
  hadoop_add_option "--loglevel level" "set the log4j level for this command"
  hadoop_add_option "--hosts filename" "list of hosts to use in worker mode"
  hadoop_add_option "--workers" "turn on worker mode"
  hadoop_add_subcommand "application" client "prints application(s) report/kill application"
  hadoop_add_subcommand "applicationattempt" client "prints applicationattempt(s) report"
  hadoop_add_subcommand "classpath" client "prints the class path needed to get the hadoop jar and the required libraries"
  hadoop_add_subcommand "cluster" client "prints cluster information"
  hadoop_add_subcommand "container" client "prints container(s) report"
  hadoop_add_subcommand "daemonlog" admin "get/set the log level for each daemon"
  hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
  hadoop_add_subcommand "jar <jar>" client "run a jar file"
  hadoop_add_subcommand "logs" client "dump container logs"
  hadoop_add_subcommand "node" admin "prints node report(s)"
  hadoop_add_subcommand "nodemanager" daemon "run a nodemanager on each worker"
  hadoop_add_subcommand "proxyserver" daemon "run the web app proxy server"
  hadoop_add_subcommand "queue" client "prints queue information"
  hadoop_add_subcommand "resourcemanager" daemon "run the ResourceManager"
  hadoop_add_subcommand "rmadmin" admin "admin tools"
  hadoop_add_subcommand "router" daemon "run the Router daemon"
  hadoop_add_subcommand "schedulerconf" client "Updates scheduler configuration"
  hadoop_add_subcommand "scmadmin" admin "SharedCacheManager admin tools"
  hadoop_add_subcommand "sharedcachemanager" daemon "run the SharedCacheManager daemon"
  hadoop_add_subcommand "timelinereader" client "run the timeline reader server"
  hadoop_add_subcommand "timelineserver" daemon "run the timeline server"
  hadoop_add_subcommand "top" client "view cluster information"
  hadoop_add_subcommand "version" client "print the version"
  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
## @description  Default command handler for yarn command
## @audience     public
## @stability    stable
## @replaceable  no
## @param        CLI arguments
function yarncmd_case
{
  subcmd=$1
  shift
  case ${subcmd} in
    application|applicationattempt|container)
      HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.ApplicationCLI
      set -- "${subcmd}" "$@"
      HADOOP_SUBCMD_ARGS=("$@")
    ;;
    classpath)
      hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
    ;;
    cluster)
      HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.ClusterCLI
    ;;
    daemonlog)
      HADOOP_CLASSNAME=org.apache.hadoop.log.LogLevel
    ;;
    envvars)
      echo "JAVA_HOME='${JAVA_HOME}'"
      echo "HADOOP_YARN_HOME='${HADOOP_YARN_HOME}'"
      echo "YARN_DIR='${YARN_DIR}'"
      echo "YARN_LIB_JARS_DIR='${YARN_LIB_JARS_DIR}'"
      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
      exit 0
    ;;
    jar)
      HADOOP_CLASSNAME=org.apache.hadoop.util.RunJar
    ;;
    historyserver)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      echo "DEPRECATED: Use of this command to start the timeline server is deprecated." 1>&2
      echo "Instead use the timelineserver command for it." 1>&2
      echo "Starting the History Server anyway..." 1>&2
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
    ;;
    logs)
      HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.LogsCLI
    ;;
    node)
      HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.NodeCLI
    ;;
    nodemanager)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.nodemanager.NodeManager'
      # Backwards compatibility
      if [[ -n "${YARN_NODEMANAGER_HEAPSIZE}" ]]; then
        HADOOP_HEAPSIZE_MAX="${YARN_NODEMANAGER_HEAPSIZE}"
      fi
    ;;
    proxyserver)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.webproxy.WebAppProxyServer'
      # Backwards compatibility
      if [[ -n "${YARN_PROXYSERVER_HEAPSIZE}" ]]; then
        HADOOP_HEAPSIZE_MAX="${YARN_PROXYSERVER_HEAPSIZE}"
      fi
    ;;
    queue)
      HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.QueueCLI
    ;;
    resourcemanager)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.resourcemanager.ResourceManager'
      # Backwards compatibility
      if [[ -n "${YARN_RESOURCEMANAGER_HEAPSIZE}" ]]; then
        HADOOP_HEAPSIZE_MAX="${YARN_RESOURCEMANAGER_HEAPSIZE}"
      fi
    ;;
    rmadmin)
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.cli.RMAdminCLI'
    ;;
    router)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.router.Router'
    ;;
    schedulerconf)
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.cli.SchedConfCLI'
    ;;
    scmadmin)
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.SCMAdmin'
    ;;
    sharedcachemanager)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.sharedcachemanager.SharedCacheManager'
    ;;
    timelinereader)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer'
    ;;
    timelineserver)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
      # Backwards compatibility
      if [[ -n "${YARN_TIMELINESERVER_HEAPSIZE}" ]]; then
        HADOOP_HEAPSIZE_MAX="${YARN_TIMELINESERVER_HEAPSIZE}"
      fi
    ;;
    version)
      HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
    ;;
    top)
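      # Unless the caller already passed -cols/-rows, query the terminal size
      # via tput and append matching -cols/-rows arguments for TopCLI.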
      doNotSetCols=0
      doNotSetRows=0
      for i in "$@"; do
        if [[ $i == "-cols" ]]; then
          doNotSetCols=1
        fi
        if [[ $i == "-rows" ]]; then
          doNotSetRows=1
        fi
      done
      if [ $doNotSetCols == 0 ] && [ -n "${TERM}" ]; then
        cols=$(tput cols)
        if [ -n "$cols" ]; then
          args=("$@")
          args=("${args[@]}" "-cols" "$cols")
          set -- "${args[@]}"
        fi
      fi
      if [ $doNotSetRows == 0 ] && [ -n "${TERM}" ]; then
        rows=$(tput lines)
        if [ -n "$rows" ]; then
          args=("$@")
          args=("${args[@]}" "-rows" "$rows")
          set -- "${args[@]}"
        fi
      fi
      HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.TopCLI
      HADOOP_SUBCMD_ARGS=("$@")
    ;;
    *)
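      # Anything unrecognized is treated as a fully qualified class name;
      # exit with usage text if it does not look like a valid class name.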
      HADOOP_CLASSNAME="${subcmd}"
      if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
        hadoop_exit_with_usage 1
      fi
    ;;
  esac
}
# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi
HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
HADOOP_NEW_CONFIG=true
if [[ -f "${HADOOP_LIBEXEC_DIR}/yarn-config.sh" ]]; then
  # shellcheck source=./hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh
  . "${HADOOP_LIBEXEC_DIR}/yarn-config.sh"
else
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/yarn-config.sh." 2>&1
  exit 1
fi
# now that we have support code, resolve MYNAME to an absolute path so we can use it later
MYNAME=$(hadoop_abs "${MYNAME}")
# if no args specified, show usage
if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi
# get arguments
HADOOP_SUBCMD=$1
shift
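# if this subcommand must run as a different, per-command configured user
# (typically a secure daemon), re-exec via su and exit with its status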
if hadoop_need_reexec yarn "${HADOOP_SUBCMD}"; then
  hadoop_uservar_su yarn "${HADOOP_SUBCMD}" \
    "${MYNAME}" \
    "--reexec" \
    "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi
hadoop_verify_user_perm "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
HADOOP_SUBCMD_ARGS=("$@")
if declare -f yarn_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
  hadoop_debug "Calling dynamically: yarn_subcommand_${HADOOP_SUBCMD} ${HADOOP_SUBCMD_ARGS[*]}"
  "yarn_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
else
  yarncmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
fi
# It's unclear if YARN_CLIENT_OPTS is actually a useful
# thing to have separate from HADOOP_CLIENT_OPTS.  Someone
# might use it, so let's not deprecate it and just override
# HADOOP_CLIENT_OPTS instead before we (potentially) add it
# to the command line
if [[ -n "${YARN_CLIENT_OPTS}" ]]; then
  HADOOP_CLIENT_OPTS=${YARN_CLIENT_OPTS}
fi
hadoop_add_client_opts
if [[ ${HADOOP_WORKER_MODE} = true ]]; then
  hadoop_common_worker_mode_execute "${HADOOP_YARN_HOME}/bin/yarn" "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi
hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
# everything is in globals at this point, so call the generic handler
hadoop_generic_java_subcmd_handler

+ 356 - 0
hadoop-3.0.0/bin/yarn.cmd

@ -0,0 +1,356 @@
@echo off
@rem Licensed to the Apache Software Foundation (ASF) under one or more
@rem contributor license agreements.  See the NOTICE file distributed with
@rem this work for additional information regarding copyright ownership.
@rem The ASF licenses this file to You under the Apache License, Version 2.0
@rem (the "License"); you may not use this file except in compliance with
@rem the License.  You may obtain a copy of the License at
@rem
@rem     http://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem The Hadoop command script
@rem
@rem Environment Variables
@rem
@rem   JAVA_HOME            The java implementation to use.
@rem
@rem   YARN_CLASSPATH       Extra Java CLASSPATH entries.
@rem
@rem   YARN_HEAPSIZE        The maximum amount of heap to use, in MB.
@rem                        Default is 1000.
@rem
@rem   YARN_{COMMAND}_HEAPSIZE overrides YARN_HEAPSIZE for a given command
@rem                           eg YARN_NODEMANAGER_HEAPSIZE sets the heap
@rem                           size for the NodeManager.  If you set the
@rem                           heap size in YARN_{COMMAND}_OPTS or YARN_OPTS
@rem                           they take precedence.
@rem
@rem   YARN_OPTS            Extra Java runtime options.
@rem
@rem   YARN_CLIENT_OPTS     Extra Java runtime options applied when a client
@rem                        command is run.
@rem
@rem   YARN_{COMMAND}_OPTS  Extra options for a single command, e.g.
@rem                        YARN_NODEMANAGER_OPTS applies only to the
@rem                        NodeManager, whereas YARN_CLIENT_OPTS applies to
@rem                        all client commands.
@rem
@rem   YARN_CONF_DIR        Alternate conf dir. Default is ${HADOOP_YARN_HOME}/conf.
@rem
@rem   YARN_ROOT_LOGGER     The root appender. Default is INFO,console
@rem
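@rem Example settings (illustrative values only, set before running this script):
@rem   set YARN_HEAPSIZE=2048
@rem   set YARN_RESOURCEMANAGER_HEAPSIZE=4096
@rem   set YARN_ROOT_LOGGER=DEBUG,console
@rem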
setlocal enabledelayedexpansion
if not defined HADOOP_BIN_PATH ( 
  set HADOOP_BIN_PATH=%~dp0
)
if "%HADOOP_BIN_PATH:~-1%" == "\" (
  set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
)
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
if not defined HADOOP_LIBEXEC_DIR (
  set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
)
call %HADOOP_LIBEXEC_DIR%\yarn-config.cmd %*
if "%1" == "--config" (
  shift
  shift
)
if "%1" == "--loglevel" (
  shift
  shift
)
:main
  if exist %YARN_CONF_DIR%\yarn-env.cmd (
    call %YARN_CONF_DIR%\yarn-env.cmd
  )
  set yarn-command=%1
  call :make_command_arguments %*
  if not defined yarn-command (
      goto print_usage
  )
  @rem JAVA and JAVA_HEAP_MAX are set in hadoop-config.cmd
  if defined YARN_HEAPSIZE (
    @rem echo run with Java heapsize %YARN_HEAPSIZE%
    set JAVA_HEAP_MAX=-Xmx%YARN_HEAPSIZE%m
  )
  @rem CLASSPATH initially contains HADOOP_CONF_DIR & YARN_CONF_DIR
  if not defined HADOOP_CONF_DIR (
    echo No HADOOP_CONF_DIR set. 
    echo Please specify it either in yarn-env.cmd or in the environment.
    goto :eof
  )
  set CLASSPATH=%HADOOP_CONF_DIR%;%YARN_CONF_DIR%;%CLASSPATH%
  @rem for developers, add Hadoop classes to CLASSPATH
  if exist %HADOOP_YARN_HOME%\yarn-api\target\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-api\target\classes
  )
  if exist %HADOOP_YARN_HOME%\yarn-common\target\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-common\target\classes
  )
  if exist %HADOOP_YARN_HOME%\yarn-mapreduce\target\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-mapreduce\target\classes
  )
  if exist %HADOOP_YARN_HOME%\yarn-master-worker\target\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-master-worker\target\classes
  )
  if exist %HADOOP_YARN_HOME%\yarn-server\yarn-server-nodemanager\target\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-nodemanager\target\classes
  )
  if exist %HADOOP_YARN_HOME%\yarn-server\yarn-server-common\target\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-common\target\classes
  )
  if exist %HADOOP_YARN_HOME%\yarn-server\yarn-server-resourcemanager\target\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-resourcemanager\target\classes
  )
  if exist %HADOOP_YARN_HOME%\yarn-server\yarn-server-applicationhistoryservice\target\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-applicationhistoryservice\target\classes
  )
  if exist %HADOOP_YARN_HOME%\yarn-server\yarn-server-router\target\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-router\target\classes
  )
  if exist %HADOOP_YARN_HOME%\build\test\classes (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\build\test\classes
  )
  if exist %HADOOP_YARN_HOME%\build\tools (
    set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\build\tools
  )
  set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\*
  set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_LIB_JARS_DIR%\*
  if %yarn-command% == classpath (
    if not defined yarn-command-arguments (
      @rem No need to bother starting up a JVM for this simple case.
      @echo %CLASSPATH%
      exit /b
    )
  )
  set yarncommands=resourcemanager nodemanager proxyserver rmadmin version jar ^
     application applicationattempt container node queue logs daemonlog historyserver ^
     timelineserver timelinereader router cluster schedulerconf classpath
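  @rem known subcommands jump to their matching label below; anything else is
  @rem treated as a class name, run with the current directory on the classpath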
  for %%i in ( %yarncommands% ) do (
    if %yarn-command% == %%i set yarncommand=true
  )
  if defined yarncommand (
    call :%yarn-command%
  ) else (
    set CLASSPATH=%CLASSPATH%;%CD%
    set CLASS=%yarn-command%
  )
  if defined JAVA_LIBRARY_PATH (
    set YARN_OPTS=%YARN_OPTS% -Djava.library.path=%JAVA_LIBRARY_PATH%
  )
  set java_arguments=%JAVA_HEAP_MAX% %YARN_OPTS% -classpath %CLASSPATH% %CLASS% %yarn-command-arguments%
  call %JAVA% %java_arguments%
goto :eof
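@rem Per-command handlers: each label sets CLASS (plus any command-specific
@rem options and classpath entries) and then returns to :main.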
:classpath
  set CLASS=org.apache.hadoop.util.Classpath
  goto :eof
:rmadmin
  set CLASS=org.apache.hadoop.yarn.client.cli.RMAdminCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  goto :eof
:application
  set CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  set yarn-command-arguments=%yarn-command% %yarn-command-arguments%
  goto :eof
:applicationattempt
  set CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  set yarn-command-arguments=%yarn-command% %yarn-command-arguments%
  goto :eof
:cluster
  set CLASS=org.apache.hadoop.yarn.client.cli.ClusterCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  goto :eof
:container
  set CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  set yarn-command-arguments=%yarn-command% %yarn-command-arguments%
  goto :eof  
:node
  set CLASS=org.apache.hadoop.yarn.client.cli.NodeCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  goto :eof
:queue
  set CLASS=org.apache.hadoop.yarn.client.cli.QueueCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  goto :eof
:resourcemanager
  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\rm-config\log4j.properties
  set CLASS=org.apache.hadoop.yarn.server.resourcemanager.ResourceManager
  set YARN_OPTS=%YARN_OPTS% %YARN_RESOURCEMANAGER_OPTS%
  if defined YARN_RESOURCEMANAGER_HEAPSIZE (
    set JAVA_HEAP_MAX=-Xmx%YARN_RESOURCEMANAGER_HEAPSIZE%m
  )
  goto :eof
:historyserver
  @echo DEPRECATED: Use of this command to start the timeline server is deprecated. 1>&2
  @echo Instead use the timelineserver command for it. 1>&2
  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\ahs-config\log4j.properties
  set CLASS=org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer
  set YARN_OPTS=%YARN_OPTS% %HADOOP_HISTORYSERVER_OPTS%
  if defined YARN_HISTORYSERVER_HEAPSIZE (
    set JAVA_HEAP_MAX=-Xmx%YARN_HISTORYSERVER_HEAPSIZE%m
  )
  goto :eof
:timelineserver
  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\timelineserver-config\log4j.properties
  set CLASS=org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer
  set YARN_OPTS=%YARN_OPTS% %HADOOP_TIMELINESERVER_OPTS%
  if defined YARN_TIMELINESERVER_HEAPSIZE (
    set JAVA_HEAP_MAX=-Xmx%YARN_TIMELINESERVER_HEAPSIZE%m
  )
  goto :eof
:timelinereader
  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\timelineserver-config\log4j.properties
  set CLASS=org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer
  set YARN_OPTS=%YARN_OPTS% %YARN_TIMELINEREADER_OPTS%
  goto :eof
:router
  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\router-config\log4j.properties
  set CLASS=org.apache.hadoop.yarn.server.router.Router
  set YARN_OPTS=%YARN_OPTS% %HADOOP_ROUTER_OPTS%
  goto :eof
:nodemanager
  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\nm-config\log4j.properties
  set CLASS=org.apache.hadoop.yarn.server.nodemanager.NodeManager
  set YARN_OPTS=%YARN_OPTS% -server %HADOOP_NODEMANAGER_OPTS%
  if defined YARN_NODEMANAGER_HEAPSIZE (
    set JAVA_HEAP_MAX=-Xmx%YARN_NODEMANAGER_HEAPSIZE%m
  )
  goto :eof
:proxyserver
  set CLASS=org.apache.hadoop.yarn.server.webproxy.WebAppProxyServer
  set YARN_OPTS=%YARN_OPTS% %HADOOP_PROXYSERVER_OPTS%
  if defined YARN_PROXYSERVER_HEAPSIZE (
    set JAVA_HEAP_MAX=-Xmx%YARN_PROXYSERVER_HEAPSIZE%m
  )
  goto :eof
:version
  set CLASS=org.apache.hadoop.util.VersionInfo
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  goto :eof
:jar
  set CLASS=org.apache.hadoop.util.RunJar
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  goto :eof
:logs
  set CLASS=org.apache.hadoop.yarn.client.cli.LogsCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  goto :eof
:daemonlog
  set CLASS=org.apache.hadoop.log.LogLevel
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  goto :eof
:schedulerconf
  set CLASS=org.apache.hadoop.yarn.client.cli.SchedConfCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  goto :eof
@rem This changes %1, %2 etc. Hence those cannot be used after calling this.
:make_command_arguments
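  @rem skip any --config/--loglevel pairs and the command name itself, then
  @rem collect whatever remains into yarn-command-arguments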
  if "%1" == "--config" (
    shift
    shift
  )
  if "%1" == "--loglevel" (
    shift
    shift
  )
  if [%2] == [] goto :eof
  shift
  set _yarnarguments=
  :MakeCmdArgsLoop 
  if [%1]==[] goto :EndLoop 
  if not defined _yarnarguments (
    set _yarnarguments=%1
  ) else (
    set _yarnarguments=!_yarnarguments! %1
  )
  shift
  goto :MakeCmdArgsLoop 
  :EndLoop 
  set yarn-command-arguments=%_yarnarguments%
  goto :eof
:print_usage
  @echo Usage: yarn [--config confdir] [--loglevel loglevel] COMMAND
  @echo        where COMMAND is one of:
  @echo   resourcemanager      run the ResourceManager
  @echo   nodemanager          run a nodemanager on each slave
  @echo   router               run the Router daemon
  @echo   timelineserver       run the timeline server
  @echo   timelinereader       run the timeline reader server
  @echo   rmadmin              admin tools
  @echo   version              print the version
  @echo   jar ^<jar^>          run a jar file
  @echo   application          prints application(s) report/kill application
  @echo   applicationattempt   prints applicationattempt(s) report
  @echo   cluster              prints cluster information
  @echo   container            prints container(s) report
  @echo   node                 prints node report(s)
  @echo   queue                prints queue information
  @echo   logs                 dump container logs
  @echo   schedulerconf        updates scheduler configuration
  @echo   classpath            prints the class path needed to get the
  @echo                        Hadoop jar and the required libraries
  @echo   daemonlog            get/set the log level for each daemon
  @echo   or
  @echo   CLASSNAME            run the class named CLASSNAME
  @echo Most commands print help when invoked w/o parameters.
endlocal