@echo off
@rem Licensed to the Apache Software Foundation (ASF) under one or more
@rem contributor license agreements. See the NOTICE file distributed with
@rem this work for additional information regarding copyright ownership.
@rem The ASF licenses this file to You under the Apache License, Version 2.0
@rem (the "License"); you may not use this file except in compliance with
@rem the License. You may obtain a copy of the License at
@rem
@rem     http://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
setlocal enabledelayedexpansion

if not defined HADOOP_BIN_PATH (
  set HADOOP_BIN_PATH=%~dp0
)

if "%HADOOP_BIN_PATH:~-1%" == "\" (
  set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
)
@rem Determine the log file name.
@rem If we're being called by --service we need to use %2.
@rem If we're being called with --config we need to use %3.
@rem Otherwise use %1.
if "%1" == "--config" (
  set HADOOP_LOGFILE=hadoop-%3-%computername%.log
) else if "%2" == "" (
  set HADOOP_LOGFILE=hadoop-%1-%computername%.log
) else (
  set HADOOP_LOGFILE=hadoop-%2-%computername%.log
)
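@rem Illustrative examples only (assuming %computername% expands to HOST1):
@rem   hdfs namenode                          -> HADOOP_LOGFILE=hadoop-namenode-HOST1.log
@rem   hdfs --config c:\hadoop\conf datanode  -> HADOOP_LOGFILE=hadoop-datanode-HOST1.log
@rem   hdfs --service namenode                -> HADOOP_LOGFILE=hadoop-namenode-HOST1.log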
@rem If running as a service, log to (daily rolling) files instead of the console.
if "%1" == "--service" (
  if not defined HADOOP_ROOT_LOGGER (
    set HADOOP_ROOT_LOGGER=INFO,DRFA
  )
)
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
if not defined HADOOP_LIBEXEC_DIR (
  set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
)

call %HADOOP_LIBEXEC_DIR%\hdfs-config.cmd %*
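@rem hdfs-config.cmd is expected to pull in hadoop-config.cmd, which defines
@rem JAVA, JAVA_HEAP_MAX, CLASSPATH and HADOOP_CONF_DIR used later in this script.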
- if "%1" == "--config" (
- shift
- shift
- )
- if "%1" == "--service" (
- set service_entry=true
- shift
- )
:main
  if exist %HADOOP_CONF_DIR%\hadoop-env.cmd (
    call %HADOOP_CONF_DIR%\hadoop-env.cmd
  )

  set hdfs-command=%1
  call :make_command_arguments %*

  if not defined hdfs-command (
    goto print_usage
  )
  set hdfscommands=dfs namenode secondarynamenode journalnode zkfc datanode dfsadmin haadmin fsck balancer jmxget oiv oev fetchdt getconf groups snapshotDiff lsSnapshottableDir cacheadmin mover storagepolicies
  for %%i in ( %hdfscommands% ) do (
    if %hdfs-command% == %%i set hdfscommand=true
  )

  if defined hdfscommand (
    call :%hdfs-command%
  ) else (
    set CLASSPATH=%CLASSPATH%;%CD%
    set CLASS=%hdfs-command%
  )
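  @rem Illustrative dispatch behavior (the second class name is a hypothetical example):
  @rem   hdfs fsck /              -> calls :fsck, so CLASS=org.apache.hadoop.hdfs.tools.DFSck
  @rem   hdfs org.example.MyTool  -> not a known subcommand, so it is run as a class name
  @rem                               with the current directory appended to the classpath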
  set java_arguments=%JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %hdfs-command-arguments%

  if defined service_entry (
    call :makeServiceXml %java_arguments%
  ) else (
    call %JAVA% %java_arguments%
  )

  goto :eof
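@rem Illustrative example: "hdfs namenode -format" ends up running
@rem   %JAVA% %JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% org.apache.hadoop.hdfs.server.namenode.NameNode -format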
:namenode
  set CLASS=org.apache.hadoop.hdfs.server.namenode.NameNode
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_NAMENODE_OPTS%
  goto :eof

:journalnode
  set CLASS=org.apache.hadoop.hdfs.qjournal.server.JournalNode
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_JOURNALNODE_OPTS%
  goto :eof

:zkfc
  set CLASS=org.apache.hadoop.hdfs.tools.DFSZKFailoverController
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_ZKFC_OPTS%
  goto :eof

:secondarynamenode
  set CLASS=org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_SECONDARYNAMENODE_OPTS%
  goto :eof

:datanode
  set CLASS=org.apache.hadoop.hdfs.server.datanode.DataNode
  set HADOOP_OPTS=%HADOOP_OPTS% -server %HADOOP_DATANODE_OPTS%
  goto :eof

:dfs
  set CLASS=org.apache.hadoop.fs.FsShell
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof

:dfsadmin
  set CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof

:haadmin
  set CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof

:fsck
  set CLASS=org.apache.hadoop.hdfs.tools.DFSck
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
  goto :eof

:balancer
  set CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_BALANCER_OPTS%
  goto :eof

:jmxget
  set CLASS=org.apache.hadoop.hdfs.tools.JMXGet
  goto :eof

:oiv
  set CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewerPB
  goto :eof

:oev
  set CLASS=org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer
  goto :eof

:fetchdt
  set CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
  goto :eof

:getconf
  set CLASS=org.apache.hadoop.hdfs.tools.GetConf
  goto :eof

:groups
  set CLASS=org.apache.hadoop.hdfs.tools.GetGroups
  goto :eof

:snapshotDiff
  set CLASS=org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff
  goto :eof

:lsSnapshottableDir
  set CLASS=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir
  goto :eof

:cacheadmin
  set CLASS=org.apache.hadoop.hdfs.tools.CacheAdmin
  goto :eof
:makeServiceXml
  set arguments=%*
  @echo ^<service^>
  @echo   ^<id^>%hdfs-command%^</id^>
  @echo   ^<name^>%hdfs-command%^</name^>
  @echo   ^<description^>This service runs Hadoop %hdfs-command%^</description^>
  @echo   ^<executable^>%JAVA%^</executable^>
  @echo   ^<arguments^>%arguments%^</arguments^>
  @echo ^</service^>
  goto :eof
:mover
  set CLASS=org.apache.hadoop.hdfs.server.mover.Mover
  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_MOVER_OPTS%
  goto :eof

:storagepolicies
  set CLASS=org.apache.hadoop.hdfs.tools.GetStoragePolicies
  goto :eof
@rem This changes %1, %2 etc. Hence those cannot be used after calling this.
:make_command_arguments
  if [%2] == [] goto :eof
  if "%1" == "--config" (
    shift
    shift
  )
  if "%1" == "--service" (
    shift
  )
  shift
  set _hdfsarguments=
  :MakeCmdArgsLoop
  if [%1]==[] goto :EndLoop

  if not defined _hdfsarguments (
    set _hdfsarguments=%1
  ) else (
    set _hdfsarguments=!_hdfsarguments! %1
  )
  shift
  goto :MakeCmdArgsLoop

  :EndLoop
  set hdfs-command-arguments=%_hdfsarguments%
  goto :eof
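@rem Illustrative example: for "hdfs --config c:\conf dfs -ls /" this leaves
@rem hdfs-command-arguments set to "-ls /" (the --config pair and the subcommand
@rem itself are consumed by the shifts above).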
:print_usage
  @echo Usage: hdfs [--config confdir] COMMAND
  @echo        where COMMAND is one of:
  @echo   dfs                  run a filesystem command on the file systems supported in Hadoop.
  @echo   namenode -format     format the DFS filesystem
  @echo   secondarynamenode    run the DFS secondary namenode
  @echo   namenode             run the DFS namenode
  @echo   journalnode          run the DFS journalnode
  @echo   zkfc                 run the ZK Failover Controller daemon
  @echo   datanode             run a DFS datanode
  @echo   dfsadmin             run a DFS admin client
  @echo   haadmin              run a DFS HA admin client
  @echo   fsck                 run a DFS filesystem checking utility
  @echo   balancer             run a cluster balancing utility
  @echo   jmxget               get JMX exported values from NameNode or DataNode.
  @echo   oiv                  apply the offline fsimage viewer to an fsimage
  @echo   oev                  apply the offline edits viewer to an edits file
  @echo   fetchdt              fetch a delegation token from the NameNode
  @echo   getconf              get config values from configuration
  @echo   groups               get the groups which users belong to
  @echo   snapshotDiff         diff two snapshots of a directory or diff the
  @echo                        current directory contents with a snapshot
  @echo   lsSnapshottableDir   list all snapshottable dirs owned by the current user
  @echo                        Use -help to see options
  @echo   cacheadmin           configure the HDFS cache
  @echo   mover                run a utility to move block replicas across storage types
  @echo   storagepolicies      get all the existing block storage policies
  @echo.
  @echo Most commands print help when invoked w/o parameters.

endlocal