diff --git a/bin/hbase b/bin/hbase
index 1af2213001e4..13577ad86d85 100755
--- a/bin/hbase
+++ b/bin/hbase
@@ -91,35 +91,36 @@ if [ $# = 0 ]; then
   echo ""
   echo "Commands:"
   echo "Some commands take arguments. Pass no args or -h for usage."
-  echo "  shell           Run the HBase shell"
-  echo "  hbck            Run the HBase 'fsck' tool. Defaults read-only hbck1."
-  echo "                  Pass '-j /path/to/HBCK2.jar' to run hbase-2.x HBCK2."
-  echo "  snapshot        Tool for managing snapshots"
+  echo "  shell            Run the HBase shell"
+  echo "  hbck             Run the HBase 'fsck' tool. Defaults read-only hbck1."
+  echo "                   Pass '-j /path/to/HBCK2.jar' to run hbase-2.x HBCK2."
+  echo "  snapshot         Tool for managing snapshots"
   if [ "${in_omnibus_tarball}" = "true" ]; then
-    echo "  wal             Write-ahead-log analyzer"
-    echo "  hfile           Store file analyzer"
-    echo "  zkcli           Run the ZooKeeper shell"
-    echo "  master          Run an HBase HMaster node"
-    echo "  regionserver    Run an HBase HRegionServer node"
-    echo "  zookeeper       Run a ZooKeeper server"
-    echo "  rest            Run an HBase REST server"
-    echo "  thrift          Run the HBase Thrift server"
-    echo "  thrift2         Run the HBase Thrift2 server"
-    echo "  clean           Run the HBase clean up script"
+    echo "  wal              Write-ahead-log analyzer"
+    echo "  hfile            Store file analyzer"
+    echo "  zkcli            Run the ZooKeeper shell"
+    echo "  master           Run an HBase HMaster node"
+    echo "  regionserver     Run an HBase HRegionServer node"
+    echo "  zookeeper        Run a ZooKeeper server"
+    echo "  rest             Run an HBase REST server"
+    echo "  thrift           Run the HBase Thrift server"
+    echo "  thrift2          Run the HBase Thrift2 server"
+    echo "  clean            Run the HBase clean up script"
   fi
-  echo "  classpath       Dump hbase CLASSPATH"
-  echo "  mapredcp        Dump CLASSPATH entries required by mapreduce"
-  echo "  pe              Run PerformanceEvaluation"
-  echo "  ltt             Run LoadTestTool"
-  echo "  canary          Run the Canary tool"
-  echo "  version         Print the version"
-  echo "  backup          Backup tables for recovery"
-  echo "  restore         Restore tables from existing backup image"
-  echo "  regionsplitter  Run RegionSplitter tool"
-  echo "  rowcounter      Run RowCounter tool"
-  echo "  cellcounter     Run CellCounter tool"
-  echo "  pre-upgrade     Run Pre-Upgrade validator tool"
-  echo "  CLASSNAME       Run the class named CLASSNAME"
+  echo "  classpath        Dump hbase CLASSPATH"
+  echo "  mapredcp         Dump CLASSPATH entries required by mapreduce"
+  echo "  pe               Run PerformanceEvaluation"
+  echo "  ltt              Run LoadTestTool"
+  echo "  canary           Run the Canary tool"
+  echo "  version          Print the version"
+  echo "  backup           Backup tables for recovery"
+  echo "  restore          Restore tables from existing backup image"
+  echo "  completebulkload Run BulkLoadHFiles tool"
+  echo "  regionsplitter   Run RegionSplitter tool"
+  echo "  rowcounter       Run RowCounter tool"
+  echo "  cellcounter      Run CellCounter tool"
+  echo "  pre-upgrade      Run Pre-Upgrade validator tool"
+  echo "  CLASSNAME        Run the class named CLASSNAME"
   exit 1
 fi
 
@@ -645,6 +646,8 @@ elif [ "$COMMAND" = "cellcounter" ] ; then
   CLASS='org.apache.hadoop.hbase.mapreduce.CellCounter'
 elif [ "$COMMAND" = "pre-upgrade" ] ; then
   CLASS='org.apache.hadoop.hbase.tool.PreUpgradeValidator'
+elif [ "$COMMAND" = "completebulkload" ] ; then
+  CLASS='org.apache.hadoop.hbase.tool.BulkLoadHFilesTool'
 else
   CLASS=$COMMAND
 fi
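For reference only (not part of the patch): with the mapping above, bin/hbase resolves the new subcommand to org.apache.hadoop.hbase.tool.BulkLoadHFilesTool, so a minimal invocation might look like the following sketch. The HFile output directory and table name are placeholders.

----
# Sketch only: directory and table name below are placeholders, not from the patch.
# bin/hbase maps "completebulkload" to org.apache.hadoop.hbase.tool.BulkLoadHFilesTool (see hunk above).
$ bin/hbase completebulkload /user/hbase/hfile-output my_table
----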
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
index ec349fe9cb6e..70aa26a28d64 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
@@ -182,13 +182,16 @@ public LoadIncrementalHFiles(Configuration conf) {
   }
 
   private void usage() {
-    System.err.println("usage: " + NAME + " /path/to/hfileoutputformat-output tablename -loadTable"
-        + "\n -D" + CREATE_TABLE_CONF_KEY + "=no - can be used to avoid creation of table by "
-        + "this tool\n Note: if you set this to 'no', then the target table must already exist "
-        + "in HBase\n -loadTable implies your baseDirectory to store file has a depth of 3 ,you"
-        + " must have an existing table\n-D" + IGNORE_UNMATCHED_CF_CONF_KEY + "=yes - can be used "
-        + "to ignore unmatched column families\n" +
-        "\n");
+    System.err.println("usage: " + "bin/hbase completebulkload <-Dargs> "
+        + "</path/to/hfileoutputformat-output> <tablename>\n"
+        + "\t-D" + CREATE_TABLE_CONF_KEY + "=no can be used to avoid creation "
+        + "of a table by this tool.\n"
+        + "\t Note: if you set this to 'no', then target table must already exist.\n"
+        + "\t-D" + IGNORE_UNMATCHED_CF_CONF_KEY + "=yes can be used to ignore "
+        + "unmatched column families.\n"
+        + "\t-loadTable switch implies your baseDirectory to store file has a "
+        + "depth of 3, table must exist\n"
+        + "\t and -loadTable switch is the last option on the command line.\n\n");
   }
 
   /**
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc b/src/main/asciidoc/_chapters/ops_mgt.adoc
index 4bec817d278a..9cbe3d359b88 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -841,7 +841,7 @@ $ bin/hbase org.apache.hadoop.hbase.tool.LoadIncrementalHFiles
+HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase classpath` ${HADOOP_HOME}/bin/hadoop jar ${HBASE_HOME}/hbase-mapreduce-VERSION.jar completebulkload
 ----
 
 [[completebulkload.warning]]
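As a usage sketch only (not part of the patch): the rewritten usage() text above describes invocations where the -D options come first and -loadTable, if given, is the last option on the command line. The configuration property names below are assumptions about what CREATE_TABLE_CONF_KEY and IGNORE_UNMATCHED_CF_CONF_KEY resolve to; the path and table name are placeholders.

----
# Sketch under assumptions: CREATE_TABLE_CONF_KEY -> create.table,
# IGNORE_UNMATCHED_CF_CONF_KEY -> ignore.unmatched.families; path and table are placeholders.
# Per the new usage text, -loadTable (if used) must be the last option on the command line.
$ bin/hbase completebulkload -Dcreate.table=no -Dignore.unmatched.families=yes \
    /user/hbase/hfile-output my_existing_table -loadTable
----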