{"rsdb":{"rid":"309084","subhead":"","postdate":"0","aid":"224246","fid":"118","uid":"1","topic":"1","content":"
\n\t\t\t\t\t\t\t\t\n \t\t\t\t\t\t\t\t \n\t\t\t\t\t\t
\n

\u672c\u535a\u6587\u7684\u4e3b\u8981\u5185\u5bb9\u6709<\/strong><\/span><\/p>\n

\u3000\u3000.HBase\u7684\u5355\u673a\u6a21\u5f0f\uff081\u8282\u70b9\uff09\u5b89\u88c5<\/strong><\/span><\/p>\n

\u3000\u3000.HBase\u7684\u5355\u673a\u6a21\u5f0f\uff081\u8282\u70b9\uff09\u7684\u542f\u52a8<\/strong><\/strong><\/span><\/p>\n

\u3000\u3000.HBase\u7684\u4f2a\u5206\u5e03\u6a21\u5f0f\uff081\u8282\u70b9\uff09\u5b89\u88c5<\/strong><\/span><\/p>\n

\u3000\u3000 .HBase\u7684\u4f2a\u5206\u5e03\u6a21\u5f0f\uff081\u8282\u70b9\uff09\u7684\u542f\u52a8<\/strong><\/span><\/p>\n

\u3000\u3000 .HBase\u7684\u5206\u5e03\u6a21\u5f0f\uff083\u30015\u8282\u70b9\uff09\u5b89\u88c5<\/span><\/strong><\/p>\n

\u3000 .HBase\u7684\u5206\u5e03\u6a21\u5f0f\uff083\u30015\u8282\u70b9\uff09\u7684\u542f\u52a8<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000\u3000\u3000\u89c1\u535a\u5ba2\uff1a HBase HA\u7684\u5206\u5e03\u5f0f\u96c6\u7fa4\u90e8\u7f72<\/a><\/h1>\n

\u3000 \u3000\u3000.HBase\u73af\u5883\u642d\u5efa60010\u7aef\u53e3\u65e0\u6cd5\u8bbf\u95ee\u95ee\u9898\u89e3\u51b3\u65b9\u6848<\/span><\/strong><\/p>\n

\u3000\u3000\u3000\u3000-------------\u3000\u3000\u3000<\/strong>\u6ce8\u610f\u3000HBase1.X\u7248\u672c\u4e4b\u540e\uff0c\u6ca160010\u4e86\u3002 -------------\u3000<\/strong><\/p>\n


\u3000\u3000\u3000\u3000\u3000\u3000\u3000\u3000\u53c2\u8003\uff1ahttp:\/\/blog.csdn.net\/tian_li\/article\/details\/50601210<\/p>\n

\u3000\u3000 .\u8fdb\u5165HBase Shell<\/strong><\/span><\/p>\n

\u3000\u3000 .\u4e3a\u4ec0\u4e48\u5728HBase\uff0c\u9700\u8981\u4f7f\u7528zookeeper\uff1f<\/strong><\/span><\/p>\n

\u3000\u3000 .\u5173\u4e8eHBase\u7684\u66f4\u591a\u6280\u672f\u7ec6\u8282\uff0c\u5f3a\u70c8\u5fc5\u591a\u770b<\/strong><\/span><\/p>\n

\u3000\u3000\u3000 .\u83b7\u53d6\u547d\u4ee4\u5217\u8868\uff1ahelp\u5e2e\u52a9\u547d\u4ee4<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000.\u521b\u5efa\u8868\uff1acreate\u547d\u4ee4<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000.\u5411\u8868\u4e2d\u52a0\u5165\u884c\uff1aput\u547d\u4ee4<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000.\u4ece\u8868\u4e2d\u68c0\u7d22\u884c\uff1aget\u547d\u4ee4<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000.\u8bfb\u53d6\u591a\u884c\uff1ascan\u547d\u4ee4<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000.\u7edf\u8ba1\u8868\u4e2d\u7684\u884c\u6570\uff1acount\u547d\u4ee4<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000.\u5220\u9664\u884c\uff1adelete\u547d\u4ee4<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000.\u6e05\u7a7a\u8868\uff1atruncate\u547d\u4ee4<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000.\u5220\u9664\u8868:drop\u547d\u4ee4<\/strong><\/span><\/p>\n

\u3000\u3000\u3000\u3000.\u66f4\u6362\u8868 \uff1aalter\u547d\u4ee4<\/strong><\/span><\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\u60f3\u8bf4\u7684\u662f\uff0c<\/strong><\/span><\/p>\n

HBase\u7684\u5b89\u88c5\u5305\u91cc\u9762\u6709\u81ea\u5e26zookeeper\u7684\u3002\u5f88\u591a\u7cfb\u7edf\u90e8\u7f72\u4e5f\u662f\u76f4\u63a5\u542f\u52a8\u4e0a\u9762\u7684zookeeper\u3002 \u672c\u6765\u4e5f\u662f\u6ca1\u6709\u95ee\u9898\u7684\uff0c\u60f3\u60f3\u5427\uff0c\u7cfb\u7edf\u91cc\u4e5f\u53ea\u6709hbase\u5728\u7528zookeeper\u3002
\u5148\u542f\u52a8zookeeper\uff0c\u518d\u5c06hbase\u8d77\u6765\u5c31\u597d\u4e86 \u3002
\u4f46\u662f\u4eca\u5929\u9047\u5230\u4e86\u4e00\u4e2a\u5f88\u86cb\u75bc\u7684\u95ee\u9898\u3002\u548c\u540c\u4e8b\u4e89\u8bba\u4e86\u5f88\u4e45\u3002 \u56e0\u4e3a\u6211\u4eec\u662f\u597d\u591ahbase\u96c6\u7fa4\u5171\u7528\u4e00\u4e2azookeeper\u7684\uff0c\u5176\u4e2d\u4e00\u4e2a\u96c6\u7fa4\u9700\u8981\u4ecehbase 0.90.2 \u5347\u7ea7\u5230hbase 0.92\u4e0a\uff0c\u81ea\u7136\uff0c\u5305\u4e5f\u8981\u66f4\u65b0\u3002
\u4f46\u662f\u5176\u4e2d\u4e00\u53f0regionserver\u4e0a\u9762\u540c\u65f6\u4e5f\u6709\u8dd1zookeeper\uff0c\u800czookeeper\u8fd8\u662f\u7528hbase 0.90.2 \u81ea\u5e26\u7684zookeeper\u5728\u8dd1\u3002
\u73b0\u5728\u597d\u4e86\uff0c\u5347\u7ea7\u4e00\u4e2aregionserver\uff0c\u8fde\u7740zookeeper\u4e5f\u8981\u53d7\u5230\u7275\u8fde\uff0c\u770b\u6765\u5fc5\u987b\u8981\u91cd\u542f\uff0c\u4e0d\u7136\uff0cjar\u5305\u66ff\u6362\u6389\uff0c\u53ef\u80fd\u4f1a\u5f71\u54cd\u5230zk\u6b63\u5728\u8dd1\u7684\u8fdb\u7a0b\u3002
\u4f46\u662f\u91cd\u542fzk\u6bd5\u7adf\u5bf9\u6b63\u5728\u8fde\u63a5\u8fd9\u4e2azk\u7684client\u7aef\u4f1a\u6709\u77ed\u6682\u7684\u5f71\u54cd\u3002
\u771f\u662f\u86cb\u75bc\u3002\u672c\u6765\u53ea\u662f\u5347\u7ea7hbase\uff0czk\u5374\u5f3a\u8026\u5408\u4e86\u3002
\u867d\u7136\u540e\u6765\u8bc1\u660ezookeeper\u53ea\u8981\u542f\u52a8\u4e86\uff0c\u54ea\u6015jar\u5305\u5220\u9664\u4e5f\u4e0d\u4f1a\u5f71\u54cd\u5230\u6b63\u5728\u8dd1\u7684zk\u8fdb\u7a0b\uff0c\u4f46\u662f\u8fd9\u6837\u7684\u4e0d\u89c4\u8303\u5e26\u6765\u7684\u98ce\u9669\uff0c\u5b9e\u5728\u662f\u6ca1\u6709\u5fc5\u8981\u3002
\u6240\u4ee5\u4f5c\u4e3a\u8fd0\u7ef4\uff0c\u6211\u5f3a\u70c8\u5efa\u8baezk \u548chbase\u5206\u5f00\u90e8\u7f72\uff0c\u5c31\u76f4\u63a5\u90e8\u7f72\u5b98\u65b9\u7684zk \u597d\u4e86\uff0c\u56e0\u4e3azk\u672c\u8eab\u5c31\u662f\u4e00\u4e2a\u72ec\u7acb\u7684\u670d\u52a1\uff0c\u6ca1\u6709\u5fc5\u8981\u548chbase \u8026\u5408\u5728\u4e00\u8d77\u3002
\u5728\u5206\u5e03\u5f0f\u7684\u7cfb\u7edf\u90e8\u7f72\u4e0a\u9762\uff0c\u4e00\u4e2a\u89d2\u8272\u5c31\u7528\u4e00\u4e2a\u4e13\u95e8\u7684\u6587\u4ef6\u5939\u7ba1\u7406\uff0c\u4e0d\u8981\u7528\u540c\u4e00\u4e2a\u76ee\u5f55\u4e0b\uff0c\u8fd9\u6837\u5b50\u771f\u7684\u5bb9\u6613\u51fa\u95ee\u9898\u3002
\u5f53\u7136datanode\u548ctasktracker\u53e6\u5f53\u522b\u8bba\uff0c\u4ed6\u4eec\u672c\u8eab\u5173\u7cfb\u5bc6\u5207\u3002<\/pre>\n

<\/p>\n

\u3000\u3000\u5f53\u7136\uff0c\u8fd9\u91cc\uff0c\u6211\u662f\u73a9\u7684\u5355\u8282\u70b9\u7684\u96c6\u7fa4\uff0c\u6765\u5b89\u88c5HBase\u800c\u5df2\uff0c\u53ea\u662f\u6765\u73a9\u73a9\u3002\u6240\u4ee5\uff0c\u5b8c\u5168\uff0c\u53ea\u9700\u7528HBase\u7684\u5b89\u88c5\u5305\u91cc\u81ea\u5e26\u7684zookeeper\u5c31\u597d\u4e86\u3002<\/p>\n

\u9664\u975e\uff0c\u662f\u591a\u8282\u70b9\u7684\u5206\u5e03\u5f0f\u96c6\u7fa4\uff0c\u6700\u597d\u7528\u5916\u90e8\u7684zookeeper\u3002<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

HDFS\u7684\u7248\u672c\uff0c\u4e0d\u540c\uff0cHBase\u91cc\u7684\u5185\u90e8\u4e5f\u4e0d\u4e00\u6837\u3002<\/p>\n

\"\"<\/p>\n

<\/p>\n

http:\/\/hbase.apache.org\/<\/a><\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

https:\/\/issues.apache.org\/jira\/browse\/HBASE?selectedTab=com.atlassian.jira.jira-projects-plugin:summary-panel<\/a><\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

\u3000.HBase\u7684\u5355\u673a\u6a21\u5f0f\u5b89\u88c5<\/strong><\/span><\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 app]$ ls
hadoop-2.4.1 hbase-0.96.2-hadoop2 hive-0.12.0 jdk1.7.0_65
[hadoop@weekend110 app]$ cd hbase-0.96.2-hadoop2\/
[hadoop@weekend110 hbase-0.96.2-hadoop2]$ ls
bin CHANGES.txt conf docs hbase-webapps lib LICENSE.txt logs NOTICE.txt README.txt
[hadoop@weekend110 hbase-0.96.2-hadoop2]$ cd conf\/
[hadoop@weekend110 conf]$ ls
hadoop-metrics2-hbase.properties hbase-env.cmd hbase-env.sh hbase-policy.xml hbase-site.xml log4j.properties regionservers
[hadoop@weekend110 conf]$ vim hbase-env.sh <\/span><\/p>\n

\"\"<\/p>\n

# Tell HBase whether it should manage it's own instance of Zookeeper or not.
export HBASE_MANAGES_ZK=true<\/span><\/p>\n

\u8bbeHBASE_MANAGES_ZK=true\uff0c\u5728\u542f\u52a8HBase\u65f6\uff0cHBase\u628aZookeeper\u4f5c\u4e3a\u81ea\u8eab\u7684\u4e00\u90e8\u5206\u8fd0\u884c\u3002<\/p>\n

<\/p>\n

\"\"<\/p>\n

export JAVA_HOME=\/home\/hadoop\/app\/jdk1.7.0_65<\/span><\/p>\n

<\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 conf]$ ls
hadoop-metrics2-hbase.properties hbase-env.cmd hbase-env.sh hbase-policy.xml hbase-site.xml log4j.properties regionservers
[hadoop@weekend110 conf]$ vim hbase-site.xml <\/span><\/p>\n

<\/p>\n

\"\"<\/p>\n

<configuration>
\u3000\u3000<property>
\u3000\u3000\u3000\u3000<name>hbase.rootdir<\/name>
\u3000\u3000\u3000\u3000<value>file:\/\/\/tmp\/hbase-hadoop<\/span>\/hbase<\/value>
\u3000\u3000<\/property>
\u3000\u3000<property>
\u3000\u3000\u3000\u3000<name>dfs.replication<\/name>
\u3000\u3000\u3000\u3000<value>1<\/value>
\u3000\u3000<\/property>
<\/configuration><\/p>\n

<\/p>\n

\u5728\u8fd9\u91cc\uff0c\u6709\u4e9b\u8d44\u6599\u4e0a\u8bf4\uff0cfile:\/\/\/tmp\/hbase-${user.name}\/hbase<\/p>\n

\u3000\u3000\u53ef\u4ee5\u770b\u5230\uff0c\u9ed8\u8ba4\u60c5\u51b5\u4e0bHBase\u7684\u6570\u636e\u5b58\u50a8\u5728\u6839\u76ee\u5f55\u4e0b\u7684tmp\u6587\u4ef6\u5939\u4e0b\u7684\u3002\u719f\u6089Linux\u7684\u4eba\u77e5\u9053\uff0c\u6b64\u6587\u4ef6\u5939\u4e3a\u4e34\u65f6\u6587\u4ef6\u5939\u3002\u4e5f\u5c31\u662f\u8bf4\uff0c\u5f53\u7cfb\u7edf\u91cd\u542f\u7684\u65f6\u5019\uff0c\u6b64\u6587\u4ef6\u5939\u4e2d\u7684\u5185\u5bb9\u5c06\u88ab\u6e05\u7a7a\u3002\u8fd9\u6837\u7528\u6237\u4fdd\u5b58\u5728HBase\u4e2d\u7684\u6570\u636e\u4e5f\u4f1a\u4e22\u5931\uff0c\u8fd9\u5f53\u7136\u662f\u7528\u6237\u4e0d\u60f3\u770b\u5230\u7684\u4e8b\u60c5\u3002\u56e0\u6b64\uff0c\u7528\u6237\u9700\u8981\u5c06HBase\u6570\u636e\u7684\u5b58\u50a8\u4f4d\u7f6e\u4fee\u6539\u4e3a\u81ea\u5df1\u5e0c\u671b\u7684\u5b58\u50a8\u4f4d\u7f6e\u3002<\/p>\n

\u3000\u3000\u6bd4\u5982\uff0c\u53ef\u4ee5\uff0c\/home\/hadoop\/data\/hbase\uff0c\u5f53\u7136\uff0c\u6211\u8fd9\u91cc\uff0c\u662f\u56e0\u4e3a\uff0c\u4f2a\u5206\u5e03\u6a21\u5f0f\u548c\u5206\u5e03\u5f0f\u6a21\u5f0f\uff0c\u90fd\u73a9\u8fc7\u4e86\u3002\u65b9\u4fbf\uff0c\u7ec3\u4e60\u52a0\u5f3aHBase\u7684shell\u64cd\u4f5c\u3002\u800c\u5df2\uff0c\u62ff\u5355\u673a\u6a21\u5f0f\u73a9\u73a9\u3002<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\u3000\u3000.HBase\u7684\u5355\u673a\u6a21\u5f0f\u7684\u542f\u52a8<\/strong><\/span><\/p>\n

\u603b\u7ed3\u5c31\u662f\uff1a\u5148\u542f\u52a8hadoop\u96c6\u7fa4\u7684\u8fdb\u7a0b\uff0c\u518d\u542f\u52a8hbase\u7684\u8fdb\u7a0b<\/strong><\/span><\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 hbase-0.96.2-hadoop2]$ cd bin
[hadoop@weekend110 bin]$ ls
get-active-master.rb hbase-common.sh hbase-jruby region_mover.rb start-hbase.cmd thread-pool.rb
graceful_stop.sh hbase-config.cmd hirb.rb regionservers.sh start-hbase.sh zookeepers.sh
hbase hbase-config.sh local-master-backup.sh region_status.rb stop-hbase.cmd
hbase-cleanup.sh hbase-daemon.sh local-regionservers.sh replication stop-hbase.sh
hbase.cmd hbase-daemons.sh master-backup.sh rolling-restart.sh test
[hadoop@weekend110 bin]$ jps
2443 NameNode
2970 NodeManager
2539 DataNode
2729 SecondaryNameNode
2866 ResourceManager
4634 Jps
[hadoop@weekend110 bin]$ .\/start-hbase.sh<\/span>
starting master, logging to \/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/logs\/hbase-hadoop-master-weekend110.out
[hadoop@weekend110 bin]$ jps
2443 NameNode
2970 NodeManager
2539 DataNode
2729 SecondaryNameNode
2866 ResourceManager
4740 HMaster
4819 Jps
[hadoop@weekend110 bin]$ hbase shell<\/span>
2016-10-12 12:43:11,095 INFO [main] Configuration.deprecation: hadoop.native.lib is deprecated. Instead, use io.native.lib.available
HBase Shell; enter 'help<RETURN>' for list of supported commands.
Type \"exit<RETURN>\" to leave the HBase Shell
Version 0.96.2-hadoop2, r1581096, Mon Mar 24 16:03:18 PDT 2014<\/p>\n

hbase(main):001:0> list
TABLE
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:\/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/lib\/slf4j-log4j12-1.6.4.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:\/home\/hadoop\/app\/hadoop-2.4.1\/share\/hadoop\/common\/lib\/slf4j-log4j12-1.7.5.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]
SLF4J: See http:\/\/www.slf4j.org\/codes.html#multiple_bindings for an explanation.
0 row(s) in 3.8200 seconds<\/p>\n

=> []
hbase(main):002:0> create 'mygirls', {NAME => 'base_info',VERSION => 3},{NAME => 'extra_info'}
Unknown argument ignored for column family base_info: 1.8.7
0 row(s) in 1.1560 seconds<\/p>\n

=> Hbase::Table - mygirls
hbase(main):003:0><\/p>\n

<\/p>\n

<\/p>\n

\u6d4b\u8bd5<\/p>\n

http:\/\/weekend110:60010\/<\/a><\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

.HBase\u7684\u4f2a\u5206\u5e03\u6a21\u5f0f\uff081\u8282\u70b9\uff09\u5b89\u88c5<\/strong><\/span><\/p>\n

1\u3001 hbase-0.96.2-hadoop2-bin.tar.gz\u538b\u7f29\u5305\u7684\u4e0a\u4f20<\/strong><\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

sftp> cd \/home\/hadoop\/app<\/span><\/p>\n

sftp> put c:\/hbase-0.96.2-hadoop2-bin.tar.gz<\/span><\/p>\n

Uploading hbase-0.96.2-hadoop2-bin.tar.gz to \/home\/hadoop\/app\/hbase-0.96.2-hadoop2-bin.tar.gz<\/p>\n

100% 77507KB 19376KB\/s 00:00:04<\/p>\n

c:\/hbase-0.96.2-hadoop2-bin.tar.gz: 79367504 bytes transferred in 4 seconds (19376 KB\/s)<\/p>\n

sftp><\/p>\n

<\/p>\n

\"\"<\/p>\n

\u6216\u8005\uff0c\u901a\u8fc7<\/p>\n

<\/p>\n

\"\"<\/p>\n

\u8fd9\u91cc\u4e0d\u591a\u8d58\u8ff0\u3002\u5177\u4f53\uff0c\u53ef\u4ee5\u770b\u6211\u7684\u5176\u4ed6\u535a\u5ba2<\/p>\n

<\/p>\n

<\/p>\n

2\u3001 <\/strong>hbase-0.96.2-hadoop2-bin.tar.gz<\/strong>\u538b\u7f29\u5305\u7684\u89e3\u538b<\/strong><\/p>\n

\"\"<\/strong><\/p>\n

[hadoop@weekend110 app]$ ls<\/p>\n

hadoop-2.4.1 hbase-0.96.2-hadoop2-bin.tar.gz hive-0.12.0 jdk1.7.0_65 zookeeper-3.4.6<\/p>\n

[hadoop@weekend110 app]$ ll<\/p>\n

total 77524<\/p>\n

drwxr-xr-x. 11 hadoop hadoop 4096 Jul 18 20:11 hadoop-2.4.1<\/p>\n

-rw-r--r--. 1 root root 79367504 May 20 13:51 hbase-0.96.2-hadoop2-bin.tar.gz<\/p>\n

drwxrwxr-x. 10 hadoop hadoop 4096 Oct 10 21:30 hive-0.12.0<\/p>\n

drwxr-xr-x. 8 hadoop hadoop 4096 Jun 17 2014 jdk1.7.0_65<\/p>\n

drwxr-xr-x. 10 hadoop hadoop 4096 Jul 30 10:28 zookeeper-3.4.6<\/p>\n

[hadoop@weekend110 app]$ tar -zxvf hbase-0.96.2-hadoop2-bin.tar.gz<\/span><\/p>\n

<\/p>\n

<\/p>\n

3\u3001\u5220\u9664\u538b\u7f29\u5305hbase-0.96.2-hadoop2-bin.tar.gz<\/strong><\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

4\u3001\u5c06HBase<\/strong>\u6587\u4ef6\u6743\u9650\u8d4b\u4e88\u7ed9hadoop<\/strong>\u7528\u6237\uff0c\u8fd9\u4e00\u6b65\uff0c\u4e0d\u9700\u3002<\/strong><\/p>\n

5\u3001HBase<\/strong>\u7684\u914d\u7f6e<\/strong><\/p>\n

\"\"<\/strong><\/p>\n

\u3000\u3000\u6ce8\u610f\u5566\uff0c\u5728hbase-0.96.2-hadoop2\u7684\u76ee\u5f55\u4e0b\uff0c\u6709hbase-webapps\uff0c\u5373\uff0c\u8bf4\u660e\uff0c\u53ef\u4ee5\u901a\u8fc7web\u7f51\u9875\u6765\u8bbf\u95eeHBase\u3002<\/strong><\/span><\/p>\n

<\/p>\n

<\/p>\n

[hadoop@weekend110 app]$ ls<\/p>\n

hadoop-2.4.1 hbase-0.96.2-hadoop2 hive-0.12.0 jdk1.7.0_65 zookeeper-3.4.6<\/p>\n

[hadoop@weekend110 app]$ cd hbase-0.96.2-hadoop2\/<\/span><\/p>\n

[hadoop@weekend110 hbase-0.96.2-hadoop2]$ ll<\/p>\n

total 436<\/p>\n

drwxr-xr-x. 4 hadoop hadoop 4096 Mar 25 2014 bin<\/p>\n

-rw-r--r--. 1 hadoop hadoop 403242 Mar 25 2014 CHANGES.txt<\/p>\n

drwxr-xr-x. 2 hadoop hadoop 4096 Mar 25 2014 conf<\/p>\n

drwxr-xr-x. 27 hadoop hadoop 4096 Mar 25 2014 docs<\/p>\n

drwxr-xr-x. 7 hadoop hadoop 4096 Mar 25 2014 hbase-webapps<\/p>\n

drwxrwxr-x. 3 hadoop hadoop 4096 Oct 11 17:49 lib<\/p>\n

-rw-r--r--. 1 hadoop hadoop 11358 Mar 25 2014 LICENSE.txt<\/p>\n

-rw-r--r--. 1 hadoop hadoop 897 Mar 25 2014 NOTICE.txt<\/p>\n

-rw-r--r--. 1 hadoop hadoop 1377 Mar 25 2014 README.txt<\/p>\n

[hadoop@weekend110 hbase-0.96.2-hadoop2]$ cd conf\/<\/span><\/p>\n

[hadoop@weekend110 conf]$ ls<\/p>\n

hadoop-metrics2-hbase.properties hbase-env.cmd hbase-env.sh hbase-policy.xml hbase-site.xml log4j.properties regionservers<\/p>\n

[hadoop@weekend110 conf]$<\/p>\n

<\/p>\n

<\/p>\n

\u5bf9\u4e8e\uff0c\u591a\u8282\u70b9\u91cc\uff0c\u5b89\u88c5HBase\uff0c\u8fd9\u91cc\u4e0d\u591a\u8bf4\u4e86\u3002\u5177\u4f53\uff0c\u53ef\u4ee5\u770b\u6211\u7684\u535a\u5ba2<\/span><\/strong><\/p>\n

\"\"<\/p>\n

1.\u4e0a\u4f20hbase\u5b89\u88c5\u5305<\/p>\n

<\/p>\n

2.\u89e3\u538b<\/p>\n

<\/p>\n

3.\u914d\u7f6ehbase\u96c6\u7fa4\uff0c\u8981\u4fee\u65393\u4e2a\u6587\u4ef6\uff08\u9996\u5148zk\u96c6\u7fa4\u5df2\u7ecf\u5b89\u88c5\u597d\u4e86\uff09<\/p>\n

\u6ce8\u610f\uff1a\u8981\u628ahadoop\u7684hdfs-site.xml\u548ccore-site.xml \u653e\u5230hbase\/conf\u4e0b<\/p>\n

<\/p>\n

3.1\u4fee\u6539hbase-env.sh<\/p>\n

export JAVA_HOME=\/usr\/java<\/a>\/jdk1.7.0_55<\/p>\n

\/\/\u544a\u8bc9hbase\u4f7f\u7528\u5916\u90e8\u7684zk<\/p>\n

export HBASE_MANAGES_ZK=false<\/p>\n

<\/p>\n

vim hbase-site.xml<\/p>\n

<configuration><\/p>\n

<!-- \u6307\u5b9ahbase\u5728HDFS\u4e0a\u5b58\u50a8\u7684\u8def\u5f84 --><\/p>\n

<property><\/p>\n

<name>hbase.rootdir<\/name><\/p>\n

<value>hdfs:\/\/ns1\/hbase<\/value><\/p>\n

<\/property><\/p>\n

<!-- \u6307\u5b9ahbase\u662f\u5206\u5e03\u5f0f\u7684 --><\/p>\n

<property><\/p>\n

<name>hbase.cluster.distributed<\/name><\/p>\n

<value>true<\/value><\/p>\n

<\/property><\/p>\n

<!-- \u6307\u5b9azk\u7684\u5730\u5740\uff0c\u591a\u4e2a\u7528\u201c,\u201d\u5206\u5272 --><\/p>\n

<property><\/p>\n

<name>hbase.zookeeper.quorum<\/name><\/p>\n

<value>weekend04:2181,weekend05:2181,weekend06:2181<\/value><\/p>\n

<\/property><\/p>\n

<\/configuration><\/p>\n

<\/p>\n

vim regionservers<\/p>\n

weekend03<\/p>\n

weekend04<\/p>\n

weekend05<\/p>\n

weekend06<\/p>\n

<\/p>\n

3.2\u62f7\u8d1dhbase\u5230\u5176\u4ed6\u8282\u70b9<\/p>\n

scp -r \/weekend\/hbase-0.96.2-hadoop2\/ weekend02:\/weekend\/<\/p>\n

scp -r \/weekend\/hbase-0.96.2-hadoop2\/ weekend03:\/weekend\/<\/p>\n

scp -r \/weekend\/hbase-0.96.2-hadoop2\/ weekend04:\/weekend\/<\/p>\n

scp -r \/weekend\/hbase-0.96.2-hadoop2\/ weekend05:\/weekend\/<\/p>\n

scp -r \/weekend\/hbase-0.96.2-hadoop2\/ weekend06:\/weekend\/<\/p>\n

4.\u5c06\u914d\u7f6e\u597d\u7684HBase\u62f7\u8d1d\u5230\u6bcf\u4e00\u4e2a\u8282\u70b9\u5e76\u540c\u6b65\u65f6\u95f4\u3002<\/p>\n

<\/p>\n

5.\u542f\u52a8\u6240\u6709\u7684hbase<\/p>\n

\u5206\u522b\u542f\u52a8zk<\/p>\n

.\/zkServer.sh start<\/p>\n

\u542f\u52a8hbase\u96c6\u7fa4<\/p>\n

start-dfs.sh<\/p>\n

\u542f\u52a8hbase\uff0c\u5728\u4e3b\u8282\u70b9\u4e0a\u8fd0\u884c\uff1a<\/p>\n

start-hbase.sh<\/p>\n

6.\u901a\u8fc7\u6d4f\u89c8\u5668\u8bbf\u95eehbase\u7ba1\u7406\u9875\u9762<\/p>\n

192.168.1.201:60010<\/p>\n

7.\u4e3a\u4fdd\u8bc1\u96c6\u7fa4\u7684\u53ef\u9760\u6027\uff0c\u8981\u542f\u52a8\u591a\u4e2aHMaster<\/p>\n

hbase-daemon.sh start master<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\u6211\u8fd9\u91cc\uff0c\u56e0\uff0c\u8003\u8651\u5230\u81ea\u5df1\u73a9\u73a9\uff0c\u4f2a\u5206\u5e03\u96c6\u7fa4\u91cc\u5b89\u88c5HBase\u3002<\/strong><\/p>\n

hbase-env.sh<\/strong><\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 conf]$ ls<\/p>\n

hadoop-metrics2-hbase.properties hbase-env.cmd hbase-env.sh hbase-policy.xml hbase-site.xml log4j.properties regionservers<\/p>\n

[hadoop@weekend110 conf]$ vim hbase-env.sh<\/span><\/p>\n

<\/p>\n

\"\"<\/p>\n

\/home\/hadoop\/app\/jdk1.7.0_65<\/p>\n

<\/p>\n

\u5355\u8282\u70b9\u7684hbase-env.sh\uff0c\u9700\u8981\u4fee\u65392\u5904\u3002<\/strong><\/span><\/p>\n

\"\"<\/p>\n

export JAVA_HOME=\/home\/hadoop\/app\/jdk1.7.0_65<\/span><\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

export HBASE_MANAGES_ZK=false<\/span><\/p>\n

<\/p>\n

.\u4e3a\u4ec0\u4e48\u5728HBase\uff0c\u9700\u8981\u4f7f\u7528zookeeper\uff1f<\/strong><\/span><\/p>\n

<\/p>\n

\u3000\u3000\u5927\u5bb6\uff0c\u5f88\u591a\u4eba\uff0c\u90fd\u6709\u4e00\u4e2a\u7591\u95ee\uff0c\u4e3a\u4ec0\u4e48\u5728HBase\uff0c\u9700\u8981\u4f7f\u7528zookeeper\uff1f\u81f3\u4e8e\u4e3a\u4ec0\u4e48\u6700\u597d\u4f7f\u7528\u5916\u90e8\u5b89\u88c5\u7684zookeeper\uff0c\u800c\u4e0d\u662fHBase\u81ea\u5e26\u7684zookeeper\uff0c\u8fd9\u91cc\uff0c\u6211\u5b9e\u5728\u662f\u4e0d\u591a\u8d58\u8ff0\u4e86\u3002<\/p>\n

\u3000\u3000zookeeper\u5b58\u50a8\u7684\u662fHBase\u4e2dROOT\u8868\u548cMETA\u8868\u7684\u4f4d\u7f6e\u3002\u6b64\u5916\uff0czookeeper\u8fd8\u8d1f\u8d23\u76d1\u63a7\u591a\u4e2a\u673a\u5668\u7684\u72b6\u6001\uff08\u6bcf\u53f0\u673a\u5668\u5230zookeeper\u4e2d\u6ce8\u518c\u4e00\u4e2a\u5b9e\u4f8b\uff09\u3002\u5f53\u67d0\u53f0\u673a\u5668\u53d1\u751f\u6545\u969c\u65f6<\/p>\n

\uff0czookeeper\u4f1a\u7b2c\u4e00\u65f6\u95f4\u611f\u77e5\u5230\uff0c\u5e76\u901a\u77e5HBase Master\u8fdb\u884c\u76f8\u5e94\u7684\u5904\u7406\u3002\u540c\u65f6\uff0c\u5f53HBase Master\u53d1\u751f\u6545\u969c\u7684\u65f6\u5019\uff0czookeeper\u8fd8\u8d1f\u8d23HBase Master\u7684\u6062\u590d\u5de5\u4f5c\uff0c\u80fd\u591f\u4fdd\u8bc1\u8fd8\u5728\u540c\u4e00\u65f6\u523b\u7cfb\u7edf\u4e2d\u53ea\u6709\u4e00\u53f0HBase Master\u63d0\u4f9b\u670d\u52a1\u3002<\/p>\n

\u3000\u3000\u5177\u4f53\u4f8b\u5b50\uff0c\u89c1<\/p>\n

HBase HA\u7684\u5206\u5e03\u5f0f\u96c6\u7fa4\u90e8\u7f72<\/a> \u7684\u6700\u5e95\u7aef\u3002<\/h1>\n

<\/p>\n

\"\"<\/p>\n

hbase-site.xml<\/strong><\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 conf]$ ls<\/p>\n

hadoop-metrics2-hbase.properties hbase-env.cmd hbase-env.sh hbase-policy.xml hbase-site.xml log4j.properties regionservers<\/p>\n

[hadoop@weekend110 conf]$ vim hbase-site.xml<\/span><\/p>\n

<\/p>\n

\"\"<\/p>\n

<configuration><\/p>\n

<property><\/p>\n

<name>hbase.zookeeper.property.dataDir<\/name><\/p>\n

<value>\/home\/hadoop\/data\/zookeeper\/zkdata<\/value><\/p>\n

<\/property><\/p>\n

<property><\/p>\n

<name>hbase.tmp.dir<\/name><\/p>\n

<value>\/home\/hadoop\/data\/tmp\/hbase<\/value><\/p>\n

<\/property><\/p>\n

<property><\/p>\n

<name>hbase.zookeeper.property.clientPort<\/name><\/p>\n

<value>2181<\/value><\/p>\n

<\/property><\/p>\n

<property><\/p>\n

<name>hbase.rootdir<\/name><\/p>\n

<value>hdfs:\/\/weekend110:9000\/hbase<\/value><\/p>\n

<\/property><\/p>\n

<property><\/p>\n

<name>hbase.cluster.distributed<\/name><\/p>\n

<value>false<\/value><\/p>\n

<\/property><\/p>\n

<property><\/p>\n

<name>dfs.replication<\/name><\/p>\n

<value>1<\/value><\/p>\n

<\/property><\/p>\n

<\/configuration><\/p>\n

<\/p>\n

<\/p>\n

\u65b0\u5efa\u76ee\u5f55<\/strong><\/p>\n

\/home\/hadoop\/data\/zookeeper\/zkdata<\/strong><\/p>\n

\/home\/hadoop\/data\/tmp\/hbase<\/strong><\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 conf]$ pwd<\/p>\n

\/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/conf<\/p>\n

[hadoop@weekend110 conf]$ mkdir -p \/home\/hadoop\/data\/zookeeper\/zkdata<\/span><\/p>\n

[hadoop@weekend110 conf]$ mkdir -p \/home\/hadoop\/data\/tmp\/hbase<\/span><\/p>\n

[hadoop@weekend110 conf]$<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

regionservers<\/strong><\/p>\n

\"\"<\/strong><\/p>\n

\"\"<\/p>\n

weekend110<\/span><\/p>\n

<\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 conf]$ ls<\/p>\n

hadoop-metrics2-hbase.properties hbase-env.cmd hbase-env.sh hbase-policy.xml hbase-site.xml log4j.properties regionservers<\/p>\n

[hadoop@weekend110 conf]$ cp \/home\/hadoop\/app\/hadoop-2.4.1\/etc\/hadoop\/{core-site.xml,hdfs-site.xml} .\/<\/span><\/p>\n

[hadoop@weekend110 conf]$ ls<\/p>\n

core-site.xml hbase-env.cmd hbase-policy.xml hdfs-site.xml regionservers<\/p>\n

hadoop-metrics2-hbase.properties hbase-env.sh hbase-site.xml log4j.properties<\/p>\n

[hadoop@weekend110 conf]$<\/p>\n

<\/p>\n

<\/p>\n

vi \/etc\/profile<\/strong><\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 conf]$ su root<\/p>\n

Password:<\/p>\n

[root@weekend110 conf]# vim \/etc\/profile<\/span><\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

export JAVA_HOME=\/home\/hadoop\/app\/jdk1.7.0_65<\/p>\n

export HADOOP_HOME=\/home\/hadoop\/app\/hadoop-2.4.1<\/p>\n

export ZOOKEEPER_HOME=\/home\/hadoop\/app\/zookeeper-3.4.6<\/p>\n

export HIVE_HOME=\/home\/hadoop\/app\/hive-0.12.0<\/p>\n

export HBASE_HOME=\/home\/hadoop\/app\/hbase-0.96.2-hadoop2<\/span><\/p>\n

export PATH=$PATH:$JAVA_HOME\/bin:$HADOOP_HOME\/bin:$HADOOP_HOME\/sbin:$ZOOKEEPER_HOME\/bin:$HIVE_HOME\/bin:$HBASE_HOME\/bin<\/span><\/p>\n

<\/p>\n

\"\"<\/p>\n

[root@weekend110 conf]# source \/etc\/profile<\/span><\/p>\n

[root@weekend110 conf]# su hadoop<\/span><\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

.HBase\u7684\u4f2a\u5206\u5e03\u6a21\u5f0f\u7684\u542f\u52a8<\/strong><\/span><\/p>\n

\u7531\u4e8e\u4f2a\u5206\u5e03\u6a21\u5f0f\u7684\u8fd0\u884c\u57fa\u4e8eHDFS\uff0c\u56e0\u6b64\u5728\u8fd0\u884cHBase\u4e4b\u524d\u9996\u5148\u9700\u8981\u542f\u52a8HDFS\uff0c<\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 hadoop-2.4.1]$ jps<\/p>\n

5802 Jps<\/p>\n

[hadoop@weekend110 hadoop-2.4.1]$ sbin\/start-all.sh<\/p>\n

This script is Deprecated. Instead use start-dfs.sh and start-yarn.sh<\/p>\n

Starting namenodes on [weekend110]<\/p>\n

weekend110: starting namenode, logging to \/home\/hadoop\/app\/hadoop-2.4.1\/logs\/hadoop-hadoop-namenode-weekend110.out<\/p>\n

weekend110: starting datanode, logging to \/home\/hadoop\/app\/hadoop-2.4.1\/logs\/hadoop-hadoop-datanode-weekend110.out<\/p>\n

Starting secondary namenodes [0.0.0.0]<\/p>\n

0.0.0.0: starting secondarynamenode, logging to \/home\/hadoop\/app\/hadoop-2.4.1\/logs\/hadoop-hadoop-secondarynamenode-weekend110.out<\/p>\n

starting yarn daemons<\/p>\n

starting resourcemanager, logging to \/home\/hadoop\/app\/hadoop-2.4.1\/logs\/yarn-hadoop-resourcemanager-weekend110.out<\/p>\n

weekend110: starting nodemanager, logging to \/home\/hadoop\/app\/hadoop-2.4.1\/logs\/yarn-hadoop-nodemanager-weekend110.out<\/p>\n

[hadoop@weekend110 hadoop-2.4.1]$ jps<\/p>\n

6022 DataNode<\/p>\n

6149 SecondaryNameNode<\/p>\n

5928 NameNode<\/p>\n

6287 ResourceManager<\/p>\n

6426 Jps<\/p>\n

6387 NodeManager<\/p>\n

[hadoop@weekend110 hadoop-2.4.1]$<\/p>\n

<\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 hbase-0.96.2-hadoop2]$ pwd<\/p>\n

\/home\/hadoop\/app\/hbase-0.96.2-hadoop2<\/p>\n

[hadoop@weekend110 hbase-0.96.2-hadoop2]$ ls<\/p>\n

bin CHANGES.txt conf docs hbase-webapps lib LICENSE.txt NOTICE.txt README.txt<\/p>\n

[hadoop@weekend110 hbase-0.96.2-hadoop2]$ cd bin<\/p>\n

[hadoop@weekend110 bin]$ ls<\/p>\n

get-active-master.rb hbase-common.sh hbase-jruby region_mover.rb start-hbase.cmd thread-pool.rb<\/p>\n

graceful_stop.sh hbase-config.cmd hirb.rb regionservers.sh start-hbase.sh zookeepers.sh<\/p>\n

hbase hbase-config.sh local-master-backup.sh region_status.rb stop-hbase.cmd<\/p>\n

hbase-cleanup.sh hbase-daemon.sh local-regionservers.sh replication stop-hbase.sh<\/p>\n

hbase.cmd hbase-daemons.sh master-backup.sh rolling-restart.sh test<\/p>\n

[hadoop@weekend110 bin]$ .\/start-hbase.sh<\/p>\n

starting master, logging to \/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/logs\/hbase-hadoop-master-weekend110.out<\/p>\n

[hadoop@weekend110 bin]$ jps<\/p>\n

6022 DataNode<\/p>\n

6149 SecondaryNameNode<\/p>\n

5928 NameNode<\/p>\n

6707 Jps<\/p>\n

6287 ResourceManager<\/p>\n

6530 HMaster<\/strong><\/p>\n

6387 NodeManager<\/p>\n

[hadoop@weekend110 bin]$<\/p>\n

<\/p>\n

http:\/\/weekend110:60010\/<\/a><\/p>\n

\"\"<\/p>\n

\u53c2\u8003\u535a\u5ba2\uff1ahttp:\/\/blog.csdn.net\/u013575812\/article\/details\/46919011<\/a><\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

[hadoop@weekend110 bin]$ pwd<\/p>\n

\/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/bin<\/p>\n

[hadoop@weekend110 bin]$ hadoop dfsadmin -safemode leave<\/span><\/strong><\/p>\n

DEPRECATED: Use of this script to execute hdfs command is deprecated.<\/p>\n

Instead use the hdfs command for it.<\/p>\n

<\/p>\n

Safe mode is OFF<\/p>\n

[hadoop@weekend110 bin]$ jps<\/p>\n

6022 DataNode<\/p>\n

7135 Jps<\/p>\n

6149 SecondaryNameNode<\/p>\n

5928 NameNode<\/p>\n

6287 ResourceManager<\/p>\n

6387 NodeManager<\/p>\n

[hadoop@weekend110 bin]$ .\/start-hbase.sh<\/span><\/p>\n

starting master, logging to \/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/logs\/hbase-hadoop-master-weekend110.out<\/p>\n

[hadoop@weekend110 bin]$ jps<\/p>\n

6022 DataNode<\/p>\n

7245 HMaster<\/p>\n

6149 SecondaryNameNode<\/p>\n

5928 NameNode<\/p>\n

6287 ResourceManager<\/p>\n

6387 NodeManager<\/p>\n

7386 Jps<\/p>\n

[hadoop@weekend110 bin]$<\/p>\n

<\/p>\n

\"\"<\/p>\n

\u4f9d\u65e7\u5982\u6b64\uff0c\u7ee7\u7eed...\u89e3\u51b3!<\/p>\n

<\/p>\n

\u53c2\u8003\u535a\u5ba2:http:\/\/www.th7.cn\/db\/nosql\/201510\/134214.shtml<\/p>\n

<\/p>\n

\u3000\u3000\u5728\u5b89\u88c5hbase-0.96.2-hadoop2\u65f6\u53d1\u73b0\u4e00\u4e2a\u95ee\u9898\uff0chbase\u80fd\u591f\u6b63\u5e38\u4f7f\u7528\uff0chbase shell \u5b8c\u5168\u53ef\u7528\uff0c\u4f46\u662f60010\u9875\u9762\u5374\u6253\u4e0d\u5f00<\/span>\uff0c\u6700\u540e\u627e\u5230\u95ee\u9898\uff0c\u662f\u56e0\u4e3a\u5f88\u591a\u7248\u672c\u7684hbase\u7684master web \u9ed8\u8ba4\u662f\u4e0d\u8fd0\u884c\u7684\uff0c\u6240\u4ee5\u9700\u8981\u81ea\u5df1\u914d\u7f6e\u9ed8\u8ba4\u7aef\u53e3\u3002<\/p>\n

\u3000\u3000<\/p>\n

\u914d\u7f6e\u5982\u4e0b<\/p>\n

\u3000\u3000\u5728hbase-site.xml\u4e2d\u52a0\u5165\u4e00\u4e0b\u5185\u5bb9\u5373\u53ef<\/p>\n

\u3000\u3000\u3000\u3000<property>
\u3000\u3000\u3000\u3000\u3000\u3000<name>hbase.master.info.port<\/name>
\u3000\u3000\u3000\u3000\u3000\u3000<value>60010<\/value>
\u3000\u3000\u3000\u3000<\/property><\/p>\n

\u3000\u3000\u3000<property>
\u3000\u3000\u3000\u3000\u3000\u3000<name>hbase.regionserver.info.port<\/name>
\u3000\u3000\u3000\u3000\u3000\u3000<value>60020<\/value>
\u3000\u3000\u3000\u3000<\/property><\/p>\n

<\/p>\n

\"\"<\/p>\n

<configuration>
\u3000\u3000<property>
\u3000\u3000\u3000\u3000<name>hbase.zookeeper.property.dataDir<\/name>
\u3000\u3000\u3000\u3000<value>\/home\/hadoop\/data\/zookeeper\/zkdata<\/value>
\u3000\u3000<\/property>
\u3000\u3000<property>
\u3000\u3000\u3000\u3000<name>hbase.tmp.dir<\/name>
\u3000\u3000\u3000\u3000<value>\/home\/hadoop\/data\/tmp\/hbase<\/value>
\u3000\u3000<\/property>
\u3000\u3000<property>
\u3000\u3000\u3000\u3000<name>hbase.zookeeper.property.clientPort<\/name>
\u3000\u3000\u3000\u3000<value>2181<\/value>
\u3000\u3000<\/property>
\u3000\u3000<property><\/span>
\u3000\u3000\u3000\u3000<name>hbase.master.info.port<\/name><\/span>
\u3000\u3000\u3000\u3000<value>60010<\/value><\/span>
\u3000\u3000<\/property><\/span>
\u3000\u3000<property><\/span>
\u3000\u3000\u3000\u3000<name>hbase.regionserver.info.port<\/name><\/span>
\u3000\u3000\u3000\u3000<value>60020<\/value><\/span>
\u3000\u3000<\/property><\/span>
\u3000\u3000<property>
\u3000\u3000\u3000\u3000<name>hbase.rootdir<\/name>
\u3000\u3000\u3000\u3000<value>hdfs:\/\/weekend110:9000\/hbase<\/value>
\u3000\u3000<\/property>
\u3000\u3000<property>
\u3000\u3000\u3000\u3000<name>hbase.cluster.distributed<\/name>
\u3000\u3000\u3000\u3000<value>false<\/value>
\u3000\u3000<\/property>
\u3000\u3000<property>
\u3000\u3000\u3000\u3000<name>dfs.replication<\/name>
\u3000\u3000\u3000\u3000<value>1<\/value>
\u3000\u3000<\/property>
<\/configuration><\/p>\n

<\/p>\n

<\/p>\n

\u8fdb\u5165HBase Shell<\/strong><\/span><\/p>\n

<\/p>\n

\u8fdb\u5165hbase\u547d\u4ee4\u884c
.\/hbase shell<\/p>\n

\u663e\u793ahbase\u4e2d\u7684\u8868
list<\/p>\n

\u521b\u5efauser\u8868\uff0c\u5305\u542binfo\u3001data\u4e24\u4e2a\u5217\u65cf
create 'user', 'info1', 'data1'
create 'user', {NAME => 'info', VERSIONS => '3'}<\/p>\n

\u5411user\u8868<\/span>\u4e2d\u63d2\u5165\u4fe1\u606f\uff0crow key\u662frk0001<\/span>\uff0c\u5217\u65cf\u662finfo<\/span>\u4e2d\u6dfb\u52a0name\u662f\u5217\u4fee\u9970\u7b26\uff08\u6807\u8bc6\u7b26\uff09<\/span>\uff0c\u503c\u662fzhangsan<\/span>
put 'user', 'rk0001', 'info:name', 'zhangsan'<\/p>\n

\u5411user\u8868\u4e2d\u63d2\u5165\u4fe1\u606f\uff0crow key\u662frk0001\uff0c\u5217\u65cfinfo\u4e2d\u6dfb\u52a0gender\u5217\u6807\u793a\u7b26\uff0c\u503c\u662ffemale
put 'user', 'rk0001', 'info:gender', 'female'<\/p>\n

\u5411user\u8868\u4e2d\u63d2\u5165\u4fe1\u606f\uff0crow key\u662frk0001\uff0c\u5217\u65cfinfo\u4e2d\u6dfb\u52a0age\u5217\u6807\u793a\u7b26\uff0c\u503c\u662f20
put 'user', 'rk0001', 'info:age', 20<\/p>\n

\u5411user\u8868\u4e2d\u63d2\u5165\u4fe1\u606f\uff0crow key\u662frk0001\uff0c\u5217\u65cfdata\u4e2d\u6dfb\u52a0pic\u5217\u6807\u793a\u7b26\uff0c\u503c\u662fpicture
put 'user', 'rk0001', 'data:pic', 'picture'<\/p>\n

\u83b7\u53d6<\/span>user\u8868\u4e2drow key\u4e3ark0001\u7684\u6240\u6709\u4fe1\u606f
get 'user', 'rk0001'<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u662frk0001\uff0cinfo\u5217\u65cf\u7684\u6240\u6709\u4fe1\u606f
get 'user', 'rk0001', 'info'<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u662frk0001\uff0cinfo\u5217\u65cf\u7684name\u3001age\u5217\u6807\u793a\u7b26\u7684\u4fe1\u606f
get 'user', 'rk0001', 'info:name', 'info:age'<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u662frk0001\uff0cinfo\u3001data\u5217\u65cf\u7684\u4fe1\u606f
get 'user', 'rk0001', 'info', 'data'
get 'user', 'rk0001', {COLUMN => ['info', 'data']}<\/p>\n

get 'user', 'rk0001', {COLUMN => ['info:name', 'data:pic']}<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u662frk0001\uff0c\u5217\u65cf\u662finfo\uff0c\u7248\u672c\u53f7\u6700\u65b05\u4e2a\u7684\u4fe1\u606f
get 'user', 'rk0001', {COLUMN => 'info', VERSIONS => 2}
get 'user', 'rk0001', {COLUMN => 'info:name', VERSIONS => 5}
get 'user', 'rk0001', {COLUMN => 'info:name', VERSIONS => 5, TIMERANGE => [1392368783980, 1392380169184]}<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u662frk0001\uff0ccell\u7684\u503c\u662fzhangsan\u7684\u4fe1\u606f
get 'people', 'rk0001', {FILTER => \"ValueFilter(=, 'binary:zhangsan')\"}<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u662frk0001\uff0c\u5217\u6807\u793a\u7b26\u4e2d\u542b\u6709a\u7684\u4fe1\u606f
get 'people', 'rk0001', {FILTER => \"(QualifierFilter(=,'substring:a'))\"}<\/p>\n

put 'user', 'rk0002', 'info:name', 'fanbingbing'
put 'user', 'rk0002', 'info:gender', 'female'
put 'user', 'rk0002', 'info:nationality', '\u4e2d\u56fd'
get 'user', 'rk0002', {FILTER => \"ValueFilter(=, 'binary:\u4e2d\u56fd')\"}<\/p>\n


\u67e5\u8be2user\u8868\u4e2d\u7684\u6240\u6709\u4fe1\u606f
scan 'user'<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\u7684\u4fe1\u606f
scan 'user', {COLUMNS => 'info'}
scan 'user', {COLUMNS => 'info', RAW => true, VERSIONS => 5}
scan 'person', {COLUMNS => 'info', RAW => true, VERSIONS => 3}
\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\u548cdata\u7684\u4fe1\u606f
scan 'user', {COLUMNS => ['info', 'data']}
scan 'user', {COLUMNS => ['info:name', 'data:pic']}<\/p>\n


\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u662finfo\u3001\u5217\u6807\u793a\u7b26\u662fname\u7684\u4fe1\u606f
scan 'user', {COLUMNS => 'info:name'}<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u662finfo\u3001\u5217\u6807\u793a\u7b26\u662fname\u7684\u4fe1\u606f,\u5e76\u4e14\u7248\u672c\u6700\u65b0\u76845\u4e2a
scan 'user', {COLUMNS => 'info:name', VERSIONS => 5}<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\u548cdata\u4e14\u5217\u6807\u793a\u7b26\u4e2d\u542b\u6709a\u5b57\u7b26\u7684\u4fe1\u606f
scan 'user', {COLUMNS => ['info', 'data'], FILTER => \"(QualifierFilter(=,'substring:a'))\"}<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\uff0crk\u8303\u56f4\u662f[rk0001, rk0003)\u7684\u6570\u636e
scan 'people', {COLUMNS => 'info', STARTROW => 'rk0001', ENDROW => 'rk0003'}<\/p>\n

\u67e5\u8be2user\u8868\u4e2drow key\u4ee5rk\u5b57\u7b26\u5f00\u5934\u7684
scan 'user',{FILTER=>\"PrefixFilter('rk')\"}<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u6307\u5b9a\u8303\u56f4\u7684\u6570\u636e
scan 'user', {TIMERANGE => [1392368783980, 1392380169184]}<\/p>\n


\u5220\u9664user\u8868row key\u4e3ark0001\uff0c\u5217\u6807\u793a\u7b26\u4e3ainfo:name\u7684\u6570\u636e
delete 'people', 'rk0001', 'info:name'
\u5220\u9664user\u8868row key\u4e3ark0001\uff0c\u5217\u6807\u793a\u7b26\u4e3ainfo:name\uff0ctimestamp\u4e3a1392383705316\u7684\u6570\u636e
delete 'user', 'rk0001', 'info:name', 1392383705316<\/p>\n


\u6e05\u7a7auser\u8868\u4e2d\u7684\u6570\u636e
truncate 'people'<\/p>\n

<\/p>\n

<\/p>\n

\u4fee\u6539\u8868\u7ed3\u6784
\u9996\u5148\u505c\u7528user\u8868\uff08\u65b0\u7248\u672c\u4e0d\u7528\uff09
disable 'user'<\/p>\n

\u6dfb\u52a0\u4e24\u4e2a\u5217\u65cff1\u548cf2
alter 'people', NAME => 'f1'
alter 'user', NAME => 'f2'
\u542f\u7528\u8868
enable 'user'<\/p>\n


###disable 'user'(\u65b0\u7248\u672c\u4e0d\u7528)
\u5220\u9664\u4e00\u4e2a\u5217\u65cf\uff1a
alter 'user', NAME => 'f1', METHOD => 'delete' \u6216 alter 'user', 'delete' => 'f1'<\/p>\n

\u6dfb\u52a0\u5217\u65cff1\u540c\u65f6\u5220\u9664\u5217\u65cff2
alter 'user', {NAME => 'f1'}, {NAME => 'f2', METHOD => 'delete'}<\/p>\n

\u5c06user\u8868\u7684f1\u5217\u65cf\u7248\u672c\u53f7\u6539\u4e3a5
alter 'people', NAME => 'info', VERSIONS => 5
\u542f\u7528\u8868
enable 'user'<\/p>\n


\u5220\u9664\u8868
disable 'user'
drop 'user'<\/p>\n


get 'person', 'rk0001', {FILTER => \"ValueFilter(=, 'binary:\u4e2d\u56fd')\"}
get 'person', 'rk0001', {FILTER => \"(QualifierFilter(=,'substring:a'))\"}
scan 'person', {COLUMNS => 'info:name'}
scan 'person', {COLUMNS => ['info', 'data'], FILTER => \"(QualifierFilter(=,'substring:a'))\"}
scan 'person', {COLUMNS => 'info', STARTROW => 'rk0001', ENDROW => 'rk0003'}<\/p>\n

scan 'person', {COLUMNS => 'info', STARTROW => '20140201', ENDROW => '20140301'}
scan 'person', {COLUMNS => 'info:name', TIMERANGE => [1395978233636, 1395987769587]}
delete 'person', 'rk0001', 'info:name'<\/p>\n

alter 'person', NAME => 'ffff'
alter 'person', NAME => 'info', VERSIONS => 10<\/p>\n


get 'user', 'rk0002', {COLUMN => ['info:name', 'data:pic']}<\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 bin]$ pwd
\/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/bin
[hadoop@weekend110 bin]$ .\/hbase shell
2016-10-12 10:09:42,925 INFO [main] Configuration.deprecation: hadoop.native.lib is deprecated. Instead, use io.native.lib.available
HBase Shell; enter 'help<RETURN>' for list of supported commands.
Type \"exit<RETURN>\" to leave the HBase Shell
Version 0.96.2-hadoop2, r1581096, Mon Mar 24 16:03:18 PDT 2014<\/p>\n

hbase(main):001:0><\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

hbase(main):001:0>help
HBase Shell, version 0.96.2-hadoop2, r1581096, Mon Mar 24 16:03:18 PDT 2014
Type 'help \"COMMAND\"', (e.g. 'help \"get\"' -- the quotes are necessary) for help on a specific command.
Commands are grouped. Type 'help \"COMMAND_GROUP\"', (e.g. 'help \"general\"') for help on a command group.<\/p>\n

COMMAND GROUPS:<\/strong> \/\/\u7f57\u5217\u51fa\u4e86\u6240\u6709\u7684\u547d\u4ee4<\/strong>
Group name: general \/\/\u901a\u5e38\u547d\u4ee4\uff0c\u8fd9\u4e9b\u547d\u4ee4\u5c06\u8fd4\u56de\u96c6\u7fa4\u7ea7\u7684\u901a\u7528\u4fe1\u606f\u3002<\/strong>
Commands: status, table_help, version, whoami<\/p>\n

Group name: ddl <\/strong> \/\/ddl\u64cd\u4f5c\u547d\u4ee4\uff0c\u8fd9\u4e9b\u547d\u4ee4\u4f1a\u521b\u5efa\u3001\u66f4\u6362\u548c\u5220\u9664HBase\u8868<\/strong>
Commands: alter, alter_async, alter_status, create, describe, disable, disable_all, drop, drop_all, enable, enable_all, exists, get_table, is_disabled, is_enabled, list, show_filters<\/p>\n

Group name: namespace \/\/namespace\u547d\u4ee4<\/strong>
Commands: alter_namespace, create_namespace, describe_namespace, drop_namespace, list_namespace, list_namespace_tables<\/p>\n

Group name: dml\u3000\u3000\u3000\u3000\u3000\u3000\/\/dml\u64cd\u4f5c\u547d\u4ee4\uff0c\u8fd9\u4e9b\u547d\u4ee4\u4f1a\u65b0\u589e\u3001\u4fee\u6539\u548c\u5220\u9664HBase\u8868\u4e2d\u7684\u6570\u636e<\/strong>
Commands: count, delete, deleteall, get, get_counter, incr, put, scan, truncate, truncate_preserve<\/p>\n

Group name: tools\u3000\u3000\u3000\u3000 \/\/tools\u547d\u4ee4\uff0c\u8fd9\u4e9b\u547d\u4ee4\u53ef\u4ee5\u7ef4\u62a4HBase\u96c6\u7fa4<\/strong>
Commands: assign, balance_switch, balancer, catalogjanitor_enabled, catalogjanitor_run, catalogjanitor_switch, close_region, compact, flush, hlog_roll, major_compact, merge_region, move, split, trace, unassign, zk_dump<\/p>\n

Group name: replication\u3000\u3000\u3000\u3000\/\/replication\u547d\u4ee4\uff0c\u8fd9\u4e9b\u547d\u4ee4\u53ef\u4ee5\u589e\u52a0\u548c\u5220\u9664\u96c6\u7fa4\u7684\u8282\u70b9<\/strong>
Commands: add_peer, disable_peer, enable_peer, list_peers, list_replicated_tables, remove_peer<\/p>\n

Group name: snapshot\u3000\u3000\u3000\u3000 \/\/snapshot\u547d\u4ee4\uff0c\u8fd9\u4e9b\u547d\u4ee4\u7528\u4e8e\u5bf9HBase\u96c6\u7fa4\u8fdb\u884c\u5feb\u7167\u4ee5\u4fbf\u5907\u4efd\u548c\u6062\u590d\u96c6\u7fa4<\/strong>
Commands: clone_snapshot, delete_snapshot, list_snapshots, rename_snapshot, restore_snapshot, snapshot<\/p>\n

Group name: security\u3000\u3000\u3000\u3000\u3000\u3000\/\/security\u547d\u4ee4\uff0c\u8fd9\u4e9b\u547d\u4ee4\u53ef\u4ee5\u63a7\u5236HBase\u7684\u5b89\u5168\u6027<\/strong>
Commands: grant, revoke, user_permission<\/p>\n

SHELL USAGE:
Quote all names in HBase Shell such as table and column names. Commas delimit
command parameters. Type <RETURN> after entering a command to run it.
Dictionaries of configuration used in the creation and alteration of tables are
Ruby Hashes. They look like this:<\/p>\n

{'key1' => 'value1', 'key2' => 'value2', ...}<\/p>\n

and are opened and closed with curley-braces. Key\/values are delimited by the
'=>' character combination. Usually keys are predefined constants such as
NAME, VERSIONS, COMPRESSION, etc. Constants do not need to be quoted. Type
'Object.constants' to see a (messy) list of all constants in the environment.<\/p>\n

If you are using binary keys or values and need to enter them in the shell, use
double-quote'd hexadecimal representation. For example:<\/p>\n

hbase> get 't1', \"key\\x03\\x3f\\xcd\"
hbase> get 't1', \"key\\003\\023\\011\"
hbase> put 't1', \"test\\xef\\xff\", 'f1:', \"\\x01\\x33\\x40\"<\/p>\n

The HBase shell is the (J)Ruby IRB with the above HBase-specific commands added.
For more on the HBase Shell, see http:\/\/hbase.apache.org\/docs\/current\/book.html
hbase(main):002:0><\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

hbase(main):002:0> version
0.96.2-hadoop2, r1581096, Mon Mar 24 16:03:18 PDT 2014<\/p>\n

hbase(main):003:0> create<\/span><\/p>\n

ERROR: wrong number of arguments (0 for 1)<\/p>\n

Here is some help for this command:
Creates a table. Pass a table name, and a set of column family
specifications (at least one), and, optionally, table configuration.
Column specification can be a simple string (name), or a dictionary
(dictionaries are described below in main help output), necessarily
including NAME attribute.
Examples: \/\/\u8fd9\u91cc\uff0c\u6709\u4f8b\u5b50<\/strong><\/p>\n

Create a table with namespace=ns1 and table qualifier=t1
hbase> create 'ns1:t1', {NAME => 'f1', VERSIONS => 5}<\/p>\n

Create a table with namespace=default and table qualifier=t1
hbase> create 't1', {NAME => 'f1'}, {NAME => 'f2'}, {NAME => 'f3'}
hbase> # The above in shorthand would be the following:
hbase> create 't1', 'f1', 'f2', 'f3'
hbase> create 't1', {NAME => 'f1', VERSIONS => 1, TTL => 2592000, BLOCKCACHE => true}
hbase> create 't1', {NAME => 'f1', CONFIGURATION => {'hbase.hstore.blockingStoreFiles' => '10'}}

Table configuration options can be put at the end.
Examples: \/\/\u8fd9\u91cc\uff0c\u6709\u4f8b\u5b50<\/strong><\/strong><\/p>\n

hbase> create 'ns1:t1', 'f1', SPLITS => ['10', '20', '30', '40']
hbase> create 't1', 'f1', SPLITS => ['10', '20', '30', '40']
hbase> create 't1', 'f1', SPLITS_FILE => 'splits.txt', OWNER => 'johndoe'
hbase> create 't1', {NAME => 'f1', VERSIONS => 5}, METADATA => { 'mykey' => 'myvalue' }
hbase> # Optionally pre-split the table into NUMREGIONS, using
hbase> # SPLITALGO (\"HexStringSplit\", \"UniformSplit\" or classname)
hbase> create 't1', 'f1', {NUMREGIONS => 15, SPLITALGO => 'HexStringSplit'}
hbase> create 't1', 'f1', {NUMREGIONS => 15, SPLITALGO => 'HexStringSplit', CONFIGURATION => {'hbase.hregion.scan.loadColumnFamiliesOnDemand' => 'true'}}<\/p>\n

You can also keep around a reference to the created table:<\/p>\n

hbase> t1 = create 't1', 'f1'<\/p>\n

Which gives you a reference to the table named 't1', on which you can then
call methods.<\/p>\n


hbase(main):004:0> <\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

[hadoop@weekend110 bin]$ jps
2443 NameNode
2970 NodeManager
7515 Jps
2539 DataNode
2729 SecondaryNameNode
2866 ResourceManager
[hadoop@weekend110 bin]$ pwd
\/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/bin
[hadoop@weekend110 bin]$ .\/start-hbase.sh
starting master, logging to \/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/logs\/hbase-hadoop-master-weekend110.out
[hadoop@weekend110 bin]$ jps
2443 NameNode
7623 HMaster
2970 NodeManager
2539 DataNode
2729 SecondaryNameNode
2866 ResourceManager
7686 Jps
[hadoop@weekend110 bin]$ .\/hbase shell
2016-10-12 15:53:46,394 INFO [main] Configuration.deprecation: hadoop.native.lib is deprecated. Instead, use io.native.lib.available
HBase Shell; enter 'help<RETURN>' for list of supported commands.
Type \"exit<RETURN>\" to leave the HBase Shell
Version 0.96.2-hadoop2, r1581096, Mon Mar 24 16:03:18 PDT 2014<\/p>\n

hbase(main):001:0> list
TABLE
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:\/home\/hadoop\/app\/hbase-0.96.2-hadoop2\/lib\/slf4j-log4j12-1.6.4.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:\/home\/hadoop\/app\/hadoop-2.4.1\/share\/hadoop\/common\/lib\/slf4j-log4j12-1.7.5.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]
SLF4J: See http:\/\/www.slf4j.org\/codes.html#multiple_bindings for an explanation.
0 row(s) in 2.8190 seconds<\/p>\n

=> []
hbase(main):002:0> create 'mygirls', {NAME => 'base_info',VERSIONS => 3},{NAME => 'extra_info'}<\/span>
0 row(s) in 1.1080 seconds<\/p>\n

=> Hbase::Table - mygirls
hbase(main):003:0> <\/p>\n

<\/p>\n

<\/p>\n

describe<\/strong><\/p>\n

\"\"<\/p>\n

hbase(main):010:0> describe 'mygirls'<\/span>
DESCRIPTION ENABLED
'mygirls', {NAME => 'base_info', DATA_BLOCK_ENCODING => 'NONE', BLOOMFILTER => 'ROW', true
REPLICATION_SCOPE => '0', VERSIONS => '3', COMPRESSION => 'NONE', MIN_VERSIONS => '0
', TTL => '2147483647', KEEP_DELETED_CELLS => 'false', BLOCKSIZE => '65536', IN_MEMOR
Y => 'false', BLOCKCACHE => 'true'}, {NAME => 'extra_info', DATA_BLOCK_ENCODING => 'N
ONE', BLOOMFILTER => 'ROW', REPLICATION_SCOPE => '0', VERSIONS => '1', COMPRESSION =>
'NONE', MIN_VERSIONS => '0', TTL => '2147483647', KEEP_DELETED_CELLS => 'false', BLO
CKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}
1 row(s) in 0.0980 seconds<\/p>\n

hbase(main):011:0> <\/p>\n

<\/p>\n

<\/p>\n

disable \u548c drop \uff0c\u5148\u5f97disable\uff08\u4e0b\u7ebf\uff09\uff0c\u518d\u624d\u80fddrop\uff08\u5220\u6389\uff09<\/strong><\/p>\n

\"\"<\/p>\n

hbase(main):002:0> disable 'mygirls'<\/span>
0 row(s) in 1.2030 seconds<\/p>\n

hbase(main):003:0> drop 'mygirls'<\/span>
0 row(s) in 0.4270 seconds<\/p>\n

hbase(main):004:0> <\/p>\n

<\/p>\n

<\/p>\n

put<\/strong><\/p>\n

\"\"<\/p>\n

hbase(main):011:0> put<\/p>\n

ERROR: wrong number of arguments (0 for 4)<\/p>\n

Here is some help for this command:
Put a cell 'value' at specified table\/row\/column and optionally
timestamp coordinates. To put a cell value into table 'ns1:t1' or 't1'
at row 'r1' under column 'c1' marked with the time 'ts1', do:<\/p>\n

hbase> put 'ns1:t1', 'r1', 'c1', 'value', ts1 \/\/\u4f8b\u5b50<\/strong><\/p>\n

The same commands also can be run on a table reference. Suppose you had a reference
t to table 't1', the corresponding command would be:<\/p>\n

hbase> t.put 'r1', 'c1', 'value', ts1 \/\/\u4f8b\u5b50<\/strong><\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

hbase(main):012:0> put 'mygirls','0001','base_info:name','fengjie'<\/span>
0 row(s) in 0.3240 seconds<\/p>\n

hbase(main):013:0> put 'mygirls','0001','base_info:age','18'<\/span>
0 row(s) in 0.0170 seconds<\/p>\n

hbase(main):014:0> put 'mygirls','0001','base_info:sex','jipinnvren'<\/span>
0 row(s) in 0.0130 seconds<\/p>\n

hbase(main):015:0> put 'mygirls','0001','base_info:boyfriend','huangxiaoming'<\/span>
0 row(s) in 0.0590 seconds<\/p>\n

<\/p>\n

<\/p>\n

get<\/strong><\/p>\n

\"\"<\/p>\n

hbase(main):016:0> get<\/span><\/p>\n

ERROR: wrong number of arguments (0 for 2)<\/p>\n

Here is some help for this command:
Get row or cell contents; pass table name, row, and optionally
a dictionary of column(s), timestamp, timerange and versions. Examples: \/\/\u4f8b\u5b50<\/strong><\/p>\n

hbase> get 'ns1:t1', 'r1'
hbase> get 't1', 'r1'
hbase> get 't1', 'r1', {TIMERANGE => [ts1, ts2]}
hbase> get 't1', 'r1', {COLUMN => 'c1'}
hbase> get 't1', 'r1', {COLUMN => ['c1', 'c2', 'c3']}
hbase> get 't1', 'r1', {COLUMN => 'c1', TIMESTAMP => ts1}
hbase> get 't1', 'r1', {COLUMN => 'c1', TIMERANGE => [ts1, ts2], VERSIONS => 4}
hbase> get 't1', 'r1', {COLUMN => 'c1', TIMESTAMP => ts1, VERSIONS => 4}
hbase> get 't1', 'r1', {FILTER => \"ValueFilter(=, 'binary:abc')\"}
hbase> get 't1', 'r1', 'c1'
hbase> get 't1', 'r1', 'c1', 'c2'
hbase> get 't1', 'r1', ['c1', 'c2']<\/p>\n

Besides the default 'toStringBinary' format, 'get' also supports custom formatting by
column. A user can define a FORMATTER by adding it to the column name in the get
specification. The FORMATTER can be stipulated: <\/p>\n

1. either as a org.apache.hadoop.hbase.util.Bytes method name (e.g, toInt, toString)
2. or as a custom class followed by method name: e.g. 'c(MyFormatterClass).format'.<\/p>\n

Example formatting cf:qualifier1 and cf:qualifier2 both as Integers:
hbase> get 't1', 'r1' {COLUMN => ['cf:qualifier1:toInt',
'cf:qualifier2:c(org.apache.hadoop.hbase.util.Bytes).toInt'] } <\/p>\n

Note that you can specify a FORMATTER by column only (cf:qualifer). You cannot specify
a FORMATTER for all columns of a column family.

The same commands also can be run on a reference to a table (obtained via get_table or
create_table). Suppose you had a reference t to table 't1', the corresponding commands
would be:<\/p>\n

hbase> t.get 'r1'
hbase> t.get 'r1', {TIMERANGE => [ts1, ts2]}
hbase> t.get 'r1', {COLUMN => 'c1'}
hbase> t.get 'r1', {COLUMN => ['c1', 'c2', 'c3']}
hbase> t.get 'r1', {COLUMN => 'c1', TIMESTAMP => ts1}
hbase> t.get 'r1', {COLUMN => 'c1', TIMERANGE => [ts1, ts2], VERSIONS => 4}
hbase> t.get 'r1', {COLUMN => 'c1', TIMESTAMP => ts1, VERSIONS => 4}
hbase> t.get 'r1', {FILTER => \"ValueFilter(=, 'binary:abc')\"}
hbase> t.get 'r1', 'c1'
hbase> t.get 'r1', 'c1', 'c2'
hbase> t.get 'r1', ['c1', 'c2']<\/p>\n


hbase(main):017:0> <\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

hbase(main):017:0> get 'mygirls','0001'<\/span>
COLUMN CELL
base_info:age timestamp=1476259587999, value=18
base_info:boyfriend timestamp=1476259597469, value=huangxiaoming
base_info:name timestamp=1476259582217, value=fengjie
base_info:sex timestamp=1476259593138, value=jipinnvren
4 row(s) in 0.0320 seconds<\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

hbase(main):018:0> put 'mygirls','0001','base_info:name','fengbaobao'<\/span>
0 row(s) in 0.0140 seconds<\/p>\n

hbase(main):019:0> get 'mygirls','0001'<\/span>
COLUMN CELL
base_info:age timestamp=1476259587999, value=18
base_info:boyfriend timestamp=1476259597469, value=huangxiaoming
base_info:name timestamp=1476259871197, value=fengbaobao
base_info:sex timestamp=1476259593138, value=jipinnvren
4 row(s) in 0.0480 seconds<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

hbase(main):020:0> get 'mygirls','0001',{COLUMN => 'base_info:name',VERSIONS => 10}<\/span>
COLUMN CELL
base_info:name timestamp=1476259871197, value=fengbaobao
base_info:name timestamp=1476259582217, value=fengjie
2 row(s) in 0.0700 seconds<\/p>\n

\u5f97\u5230\uff0c2\u4e2a\u7248\u672c\u3002\u610f\u601d\u662f\uff0c\u6700\u591a\u53ef\u5f97\u523010\u4e2a\u7248\u672c<\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

hbase(main):021:0> put 'mygirls','0001','base_info:name','fengfeng'<\/span>
0 row(s) in 0.0240 seconds<\/p>\n

hbase(main):022:0> get 'mygirls','0001',{COLUMN => 'base_info:name',VERSIONS => 10}<\/span>
COLUMN CELL
base_info:name timestamp=1476260199839, value=fengfeng
base_info:name timestamp=1476259871197, value=fengbaobao
base_info:name timestamp=1476259582217, value=fengjie
3 row(s) in 0.0550 seconds<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

hbase(main):023:0> put 'mygirls','0001','base_info:name','qinaidefeng'
0 row(s) in 0.0160 seconds<\/p>\n

hbase(main):024:0> get 'mygirls','0001',{COLUMN => 'base_info:name',VERSIONS => 10}
COLUMN CELL
base_info:name timestamp=1476260274142, value=qinaidefeng
base_info:name timestamp=1476260199839, value=fengfeng
base_info:name timestamp=1476259871197, value=fengbaobao
3 row(s) in 0.0400 seconds<\/p>\n

\u53ea\u5b58\uff0c\u6700\u8fd1\u76843\u4e2a\u7248\u672c\u3002<\/p>\n

<\/p>\n

<\/p>\n

scan<\/strong><\/p>\n

\"\"<\/p>\n

hbase(main):025:0> scan<\/span><\/p>\n

ERROR: wrong number of arguments (0 for 1)<\/p>\n

Here is some help for this command:
Scan a table; pass table name and optionally a dictionary of scanner
specifications. Scanner specifications may include one or more of:
TIMERANGE, FILTER, LIMIT, STARTROW, STOPROW, TIMESTAMP, MAXLENGTH,
or COLUMNS, CACHE<\/p>\n

If no columns are specified, all columns will be scanned.
To scan all members of a column family, leave the qualifier empty as in
'col_family:'.<\/p>\n

The filter can be specified in two ways:
1. Using a filterString - more information on this is available in the
Filter Language document attached to the HBASE-4176 JIRA
2. Using the entire package name of the filter.<\/p>\n

Some examples: \/\/\u4f8b\u5b50<\/strong><\/p>\n

hbase> scan 'hbase:meta'
hbase> scan 'hbase:meta', {COLUMNS => 'info:regioninfo'}
hbase> scan 'ns1:t1', {COLUMNS => ['c1', 'c2'], LIMIT => 10, STARTROW => 'xyz'}
hbase> scan 't1', {COLUMNS => ['c1', 'c2'], LIMIT => 10, STARTROW => 'xyz'}
hbase> scan 't1', {COLUMNS => 'c1', TIMERANGE => [1303668804, 1303668904]}
hbase> scan 't1', {FILTER => \"(PrefixFilter ('row2') AND
(QualifierFilter (>=, 'binary:xyz'))) AND (TimestampsFilter ( 123, 456))\"}
hbase> scan 't1', {FILTER =>
org.apache.hadoop.hbase.filter.ColumnPaginationFilter.new(1, 0)}<\/p>\n

For experts, there is an additional option -- CACHE_BLOCKS -- which
switches block caching for the scanner on (true) or off (false). By
default it is enabled. Examples:<\/p>\n

hbase> scan 't1', {COLUMNS => ['c1', 'c2'], CACHE_BLOCKS => false}<\/p>\n

Also for experts, there is an advanced option -- RAW -- which instructs the
scanner to return all cells (including delete markers and uncollected deleted
cells). This option cannot be combined with requesting specific COLUMNS.
Disabled by default. Example:<\/p>\n

hbase> scan 't1', {RAW => true, VERSIONS => 10}<\/p>\n

Besides the default 'toStringBinary' format, 'scan' supports custom formatting
by column. A user can define a FORMATTER by adding it to the column name in
the scan specification. The FORMATTER can be stipulated: <\/p>\n

1. either as a org.apache.hadoop.hbase.util.Bytes method name (e.g, toInt, toString)
2. or as a custom class followed by method name: e.g. 'c(MyFormatterClass).format'.<\/p>\n

Example formatting cf:qualifier1 and cf:qualifier2 both as Integers:
hbase> scan 't1', {COLUMNS => ['cf:qualifier1:toInt',
'cf:qualifier2:c(org.apache.hadoop.hbase.util.Bytes).toInt'] } <\/p>\n

Note that you can specify a FORMATTER by column only (cf:qualifer). You cannot
specify a FORMATTER for all columns of a column family.<\/p>\n

Scan can also be used directly from a table, by first getting a reference to a
table, like such:<\/p>\n

hbase> t = get_table 't'
hbase> t.scan<\/p>\n

Note in the above situation, you can still provide all the filtering, columns,
options, etc as described above.<\/p>\n

<\/p>\n

hbase(main):026:0> <\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n


hbase(main):026:0> scan 'mygirls'<\/span>
ROW COLUMN+CELL
0001 column=base_info:age, timestamp=1476259587999, value=18
0001 column=base_info:boyfriend, timestamp=1476259597469, value=huangxiaoming
0001 column=base_info:name, timestamp=1476260274142, value=qinaidefeng
0001 column=base_info:sex, timestamp=1476259593138, value=jipinnvren
1 row(s) in 0.1040 seconds<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

hbase(main):027:0> scan 'mygirls',{RAW => true,VERSIONS => 10}<\/span>
ROW COLUMN+CELL
0001 column=base_info:age, timestamp=1476259587999, value=18
0001 column=base_info:boyfriend, timestamp=1476259597469, value=huangxiaoming
0001 column=base_info:name, timestamp=1476260274142, value=qinaidefeng
0001 column=base_info:name, timestamp=1476260199839, value=fengfeng
0001 column=base_info:name, timestamp=1476259871197, value=fengbaobao
0001 column=base_info:name, timestamp=1476259582217, value=fengjie
0001 column=base_info:sex, timestamp=1476259593138, value=jipinnvren
1 row(s) in 0.0660 seconds<\/p>\n

hbase(main):028:0> get 'mygirls','0001',{COLUMN => 'base_info:name',VERSIONS => 10,RAW => true}<\/span>
COLUMN CELL
base_info:name timestamp=1476260274142, value=qinaidefeng
base_info:name timestamp=1476260199839, value=fengfeng
base_info:name timestamp=1476259871197, value=fengbaobao
3 row(s) in 0.0170 seconds<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\u4e3a\u4ec0\u4e48\uff0cscan\u80fd\u628a\u8001\u7248\u672c\u90fd\u663e\u793a\u51fa\u6765\uff1f
\u7b54\uff1a\u539f\u56e0\u662f\uff0c\u4f60\u6700\u65b0\u7684\u64cd\u4f5c\uff0c\u5e76\u6ca1\u6709\u771f\u6b63\u5237\u5230Hadoop\u6587\u4ef6\u91cc\u3002<\/p>\n

\u6700\u65b0\u7684\u64cd\u4f5c\uff0c\u662f\u5728HLog\u91cc\u3002<\/p>\n

\u8981\u7b49\u5230\uff0c\u628a\u8fd9\u4e2aHLog\u5237\u5230MemStore\u91cc\u6216HFile\u91cc\uff0c\u624d\u80fd\u628a\u90a3\u4e9b\u8fc7\u671f\u7684\u6570\u636e\u7ed9\u6e05\u9664\u6389\u3002<\/p>\n

\u90a3\u591a\u4e45\u624d\u4f1a\u5237\u5462\uff1f\u6709\u4e2a\u9ed8\u8ba4\u673a\u5236\u3002 \u8fd8\u6709\uff0c\u7acb\u5373\u628aHBase\u505c\u6389\u3002<\/p>\n

<\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\u5176\u5b9e\u554a\uff0cHBase Shell\u64cd\u4f5c\uff0c\u5728\u5e73\u5e38\u5de5\u4f5c\u4e2d\uff0c\u90fd\u4e0d\u4f1a\u8fd9\u4e48\u64cd\u4f5c\u3002\u56e0\u4e3a\uff0c\u4e0d\u5efa\u8bae\uff0c\u5f88\u4e0d\u597d\u7528\uff0c\u4e0d\u65b9\u4fbf\uff01\uff01\uff01<\/p>\n

\"\"<\/p>\n

hbase> scan 't1', {FILTER => \"(PrefixFilter ('row2') AND
(QualifierFilter (>=, 'binary:xyz'))) AND (TimestampsFilter ( 123, 456))\"}
hbase> scan 't1', {FILTER =>
org.apache.hadoop.hbase.filter.ColumnPaginationFilter.new(1, 0)}<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\u66f4\u591a\uff0c\u81ea\u884c\u53bb\u73a9\u73a9\u3002\u8fd9\u91cc\uff0c\u4e0d\u591a\u8d58\u8ff0\u3002<\/p>\n

\u8fdb\u5165hbase\u547d\u4ee4\u884c
.\/hbase shell<\/p>\n

\u663e\u793ahbase\u4e2d\u7684\u8868
list<\/p>\n

\u521b\u5efauser\u8868\uff0c\u5305\u542binfo\u3001data\u4e24\u4e2a\u5217\u65cf
create 'user', 'info1', 'data1'
create 'user', {NAME => 'info', VERSIONS => '3'}<\/p>\n

\u5411user\u8868\u4e2d\u63d2\u5165\u4fe1\u606f\uff0crow key\u4e3ark0001\uff0c\u5217\u65cfinfo\u4e2d\u6dfb\u52a0name\u5217\u6807\u793a\u7b26\uff0c\u503c\u4e3azhangsan
put 'user', 'rk0001', 'info:name', 'zhangsan'<\/p>\n

\u5411user\u8868\u4e2d\u63d2\u5165\u4fe1\u606f\uff0crow key\u4e3ark0001\uff0c\u5217\u65cfinfo\u4e2d\u6dfb\u52a0gender\u5217\u6807\u793a\u7b26\uff0c\u503c\u4e3afemale
put 'user', 'rk0001', 'info:gender', 'female'<\/p>\n

\u5411user\u8868\u4e2d\u63d2\u5165\u4fe1\u606f\uff0crow key\u4e3ark0001\uff0c\u5217\u65cfinfo\u4e2d\u6dfb\u52a0age\u5217\u6807\u793a\u7b26\uff0c\u503c\u4e3a20
put 'user', 'rk0001', 'info:age', 20<\/p>\n

\u5411user\u8868\u4e2d\u63d2\u5165\u4fe1\u606f\uff0crow key\u4e3ark0001\uff0c\u5217\u65cfdata\u4e2d\u6dfb\u52a0pic\u5217\u6807\u793a\u7b26\uff0c\u503c\u4e3apicture
put 'user', 'rk0001', 'data:pic', 'picture'<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u4e3ark0001\u7684\u6240\u6709\u4fe1\u606f
get 'user', 'rk0001'<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u4e3ark0001\uff0cinfo\u5217\u65cf\u7684\u6240\u6709\u4fe1\u606f
get 'user', 'rk0001', 'info'<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u4e3ark0001\uff0cinfo\u5217\u65cf\u7684name\u3001age\u5217\u6807\u793a\u7b26\u7684\u4fe1\u606f
get 'user', 'rk0001', 'info:name', 'info:age'<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u4e3ark0001\uff0cinfo\u3001data\u5217\u65cf\u7684\u4fe1\u606f
get 'user', 'rk0001', 'info', 'data'
get 'user', 'rk0001', {COLUMN => ['info', 'data']}<\/p>\n

get 'user', 'rk0001', {COLUMN => ['info:name', 'data:pic']}<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u4e3ark0001\uff0c\u5217\u65cf\u4e3ainfo\uff0c\u7248\u672c\u53f7\u6700\u65b05\u4e2a\u7684\u4fe1\u606f
get 'user', 'rk0001', {COLUMN => 'info', VERSIONS => 2}
get 'user', 'rk0001', {COLUMN => 'info:name', VERSIONS => 5}
get 'user', 'rk0001', {COLUMN => 'info:name', VERSIONS => 5, TIMERANGE => [1392368783980, 1392380169184]}<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u4e3ark0001\uff0ccell\u7684\u503c\u4e3azhangsan\u7684\u4fe1\u606f
get 'people', 'rk0001', {FILTER => \"ValueFilter(=, 'binary:zhangsan')\"}<\/p>\n

\u83b7\u53d6user\u8868\u4e2drow key\u4e3ark0001\uff0c\u5217\u6807\u793a\u7b26\u4e2d\u542b\u6709a\u7684\u4fe1\u606f
get 'people', 'rk0001', {FILTER => \"(QualifierFilter(=,'substring:a'))\"}<\/p>\n

put 'user', 'rk0002', 'info:name', 'fanbingbing'
put 'user', 'rk0002', 'info:gender', 'female'
put 'user', 'rk0002', 'info:nationality', '\u4e2d\u56fd'
get 'user', 'rk0002', {FILTER => \"ValueFilter(=, 'binary:\u4e2d\u56fd')\"}<\/p>\n


\u67e5\u8be2user\u8868\u4e2d\u7684\u6240\u6709\u4fe1\u606f
scan 'user'<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\u7684\u4fe1\u606f
scan 'user', {COLUMNS => 'info'}
scan 'user', {COLUMNS => 'info', RAW => true, VERSIONS => 5}
scan 'person', {COLUMNS => 'info', RAW => true, VERSIONS => 3}
\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\u548cdata\u7684\u4fe1\u606f
scan 'user', {COLUMNS => ['info', 'data']}
scan 'user', {COLUMNS => ['info:name', 'data:pic']}<\/p>\n


\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\u3001\u5217\u6807\u793a\u7b26\u4e3aname\u7684\u4fe1\u606f
scan 'user', {COLUMNS => 'info:name'}<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\u3001\u5217\u6807\u793a\u7b26\u4e3aname\u7684\u4fe1\u606f,\u5e76\u4e14\u7248\u672c\u6700\u65b0\u76845\u4e2a
scan 'user', {COLUMNS => 'info:name', VERSIONS => 5}<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\u548cdata\u4e14\u5217\u6807\u793a\u7b26\u4e2d\u542b\u6709a\u5b57\u7b26\u7684\u4fe1\u606f
scan 'user', {COLUMNS => ['info', 'data'], FILTER => \"(QualifierFilter(=,'substring:a'))\"}<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u5217\u65cf\u4e3ainfo\uff0crk\u8303\u56f4\u662f[rk0001, rk0003)\u7684\u6570\u636e
scan 'people', {COLUMNS => 'info', STARTROW => 'rk0001', ENDROW => 'rk0003'}<\/p>\n

\u67e5\u8be2user\u8868\u4e2drow key\u4ee5rk\u5b57\u7b26\u5f00\u5934\u7684
scan 'user',{FILTER=>\"PrefixFilter('rk')\"}<\/p>\n

\u67e5\u8be2user\u8868\u4e2d\u6307\u5b9a\u8303\u56f4\u7684\u6570\u636e
scan 'user', {TIMERANGE => [1392368783980, 1392380169184]}<\/p>\n

\u5220\u9664\u6570\u636e
\u5220\u9664user\u8868row key\u4e3ark0001\uff0c\u5217\u6807\u793a\u7b26\u4e3ainfo:name\u7684\u6570\u636e
delete 'people', 'rk0001', 'info:name'
\u5220\u9664user\u8868row key\u4e3ark0001\uff0c\u5217\u6807\u793a\u7b26\u4e3ainfo:name\uff0ctimestamp\u4e3a1392383705316\u7684\u6570\u636e
delete 'user', 'rk0001', 'info:name', 1392383705316<\/p>\n


\u6e05\u7a7auser\u8868\u4e2d\u7684\u6570\u636e
truncate 'people'<\/p>\n


\u4fee\u6539\u8868\u7ed3\u6784
\u9996\u5148\u505c\u7528user\u8868\uff08\u65b0\u7248\u672c\u4e0d\u7528\uff09
disable 'user'<\/p>\n

\u6dfb\u52a0\u4e24\u4e2a\u5217\u65cff1\u548cf2
alter 'people', NAME => 'f1'
alter 'user', NAME => 'f2'
\u542f\u7528\u8868
enable 'user'<\/p>\n


###disable 'user'(\u65b0\u7248\u672c\u4e0d\u7528)
\u5220\u9664\u4e00\u4e2a\u5217\u65cf\uff1a
alter 'user', NAME => 'f1', METHOD => 'delete' \u6216 alter 'user', 'delete' => 'f1'<\/p>\n

\u6dfb\u52a0\u5217\u65cff1\u540c\u65f6\u5220\u9664\u5217\u65cff2
alter 'user', {NAME => 'f1'}, {NAME => 'f2', METHOD => 'delete'}<\/p>\n

\u5c06user\u8868\u7684f1\u5217\u65cf\u7248\u672c\u53f7\u6539\u4e3a5
alter 'people', NAME => 'info', VERSIONS => 5
\u542f\u7528\u8868
enable 'user'<\/p>\n


\u5220\u9664\u8868
disable 'user'
drop 'user'<\/p>\n


get 'person', 'rk0001', {FILTER => \"ValueFilter(=, 'binary:\u4e2d\u56fd')\"}
get 'person', 'rk0001', {FILTER => \"(QualifierFilter(=,'substring:a'))\"}
scan 'person', {COLUMNS => 'info:name'}
scan 'person', {COLUMNS => ['info', 'data'], FILTER => \"(QualifierFilter(=,'substring:a'))\"}
scan 'person', {COLUMNS => 'info', STARTROW => 'rk0001', ENDROW => 'rk0003'}<\/p>\n

scan 'person', {COLUMNS => 'info', STARTROW => '20140201', ENDROW => '20140301'}
scan 'person', {COLUMNS => 'info:name', TIMERANGE => [1395978233636, 1395987769587]}
delete 'person', 'rk0001', 'info:name'<\/p>\n

alter 'person', NAME => 'ffff'
alter 'person', NAME => 'info', VERSIONS => 10<\/p>\n


get 'user', 'rk0002', {COLUMN => ['info:name', 'data:pic']}<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

hbase\u7684java<\/a> api<\/strong><\/span><\/p>\n

1\u3001\u5efa\u7acbhbase\u5de5\u7a0b<\/p>\n

\"\"<\/p>\n

\u3000\u3000\u63a8\u8350http:\/\/hbase.apache.org\/<\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

.\u5173\u4e8eHBase\u7684\u66f4\u591a\u6280\u672f\u7ec6\u8282\uff0c\u5f3a\u70c8\u5fc5\u591a\u770b<\/strong><\/span><\/p>\n

http:\/\/abloz.com\/hbase\/book.html<\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\u30002\u3001\u3000weekend110-hbase -> Build Path -> Configure Build Path<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\u3000\u3000\u6211\u4eec\u662fhadoop-2.4.1.jar\uff0c\u4f46\u662f\uff0chbase-0.96.2-hadoop2-bin.tar.gz\u81ea\u5e26\u7684\u662f\uff0chadoop-2.2.0.jar\u3002<\/p>\n

<\/p>\n

\u8fd9\u91cc\uff0c\u6211\u53c2\u8003\u4e86\u300aHadoop \u5b9e\u6218\u300b\uff0c\u9646\u5609\u6052\u8001\u5e08\u7f16\u8457\u7684\u3002P249\u9875\uff0c<\/span><\/p>\n

\u3000\u3000\u6ce8\u610f\uff1a\u5b89\u88c5Hadoop\u7684\u65f6\u5019\uff0c\u8981\u6ce8\u610fHBase\u7684\u7248\u672c\u3002\u4e5f\u5c31\u662f\u8bf4\uff0c\u9700\u8981\u6ce8\u610fHadoop\u548cHBase\u4e4b\u95f4\u7684\u7248\u672c\u5173\u7cfb\uff0c\u5982\u679c\u4e0d\u5339\u914d\uff0c\u5f88\u53ef\u80fd\u4f1a\u5f71\u54cdHBase\u7cfb\u7edf\u7684\u7a33\u5b9a\u6027\u3002\u5728HBase\u7684lib\u76ee\u5f55\u4e0b\u53ef\u4ee5\u770b\u5230\u5bf9\u5e94\u7684Hadoop\u7684JAR\u6587\u4ef6\u3002\u9ed8\u8ba4\u60c5\u51b5\u4e0b\uff0cHBase\u7684lib\u6587\u4ef6\u5939\u4e0b\u5bf9\u5e94\u7684Hadoop\u7248\u672c\u76f8\u5bf9\u7a33\u5b9a\u3002\u5982\u679c\u7528\u6237\u60f3\u8981\u4f7f\u7528\u5176\u4ed6\u7684Hadoop\u7248\u672c\uff0c\u90a3\u4e48\u9700\u8981\u5c06Hadoop\u7cfb\u7edf\u5b89\u88c5\u76ee\u5f55hadoop-*.*.*-core.jar\u6587\u4ef6\u548chadoop-*.*.*-test.jar\u6587\u4ef6\u590d\u5236\u5230HBase\u7684lib\u6587\u4ef6\u5939\u4e0b\uff0c\u4ee5\u66ff\u6362\u5176\u4ed6\u7248\u672c\u7684Hadoop\u6587\u4ef6\u3002<\/span><\/p>\n

\u3000\u3000\"\"<\/p>\n

\u3000\u3000<\/p>\n

\u3000\u3000\u8fd9\u91cc\uff0c\u6211\u4eec\u4e3a\u4e86\u65b9\u4fbf\uff0c\u76f4\u63a5\u628aD:\\SoftWare\\hbase-0.96.2-hadoop2\\lib\u7684\u6240\u6709jar\u5305\uff0c\u90fd\u5f04\u8fdb\u6765\u3002<\/p>\n

\u4e5f\u53c2\u8003\u4e86\u7f51\u4e0a\u4e00\u4e9b\u535a\u5ba2\u8d44\u6599\u8bf4\uff0c\u4e0d\u9700\u8fd9\u4e48\u591a\u3002\u6b64\u5916\uff0c\u7a0b\u5e8f\u53ef\u80fd\u5305\u542b\u4e00\u4e9b\u95f4\u63a5\u5f15\u7528\uff0c\u4ee5\u540e\u518d\u9010\u6b65\u9010\u4e2a\uff0c\u4e0b\u8f7d\uff0c\u6dfb\u52a0\u5c31\u662f\u3002\u590d\u5236\u7c98\u8d34\u5230hbase-0.96.2-hadoop2lib \u91cc\u3002<\/p>\n

\u53bbhttp:\/\/mvnrepository.com\/<\/a>\u3002<\/p>\n

\u3000\u3000\u53c2\u8003\u6211\u7684\u535a\u5ba2<\/p>\n

Eclipse\u4e0b\u65b0\u5efaMaven\u9879\u76ee\u3001\u81ea\u52a8\u6253\u4f9d\u8d56jar\u5305<\/a><\/h1>\n

\u3000\u3000\u53c2\u8003 :http:\/\/blog.csdn.net\/hetianguang\/article\/details\/51371713<\/p>\n

\u3000\u3000\u3000\u3000 http:\/\/www.cnblogs.com\/NicholasLee\/archive\/2012\/09\/13\/2683432.html<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

<\/p>\n

\u3000\u30003\u3001\u65b0\u5efa\u5305cn.itcast.bigdata.hbase<\/p>\n

\"\"<\/p>\n

\"\"<\/p>\n

<\/p>\n

\u3000\u30004\u3001\u65b0\u5efa\u7c7bHbaseDao.java<\/p>\n

\"\"<\/p>\n

<\/p>\n

<\/p>\n

\u3000\u3000\u8fd9\u91cc\uff0c\u6211\u5c31\u4ee5\u5206\u5e03\u5f0f\u96c6\u7fa4\u7684\u914d\u7f6e\uff0c\u9644\u4e0a\u4ee3\u7801\u3002\u5de5\u4f5c\u4e2d\uff0c\u5c31\u662f\u8fd9\u4e48\u5e72\u7684\uff01<\/p>\n

package cn.itcast.bigdata.hbase;<\/p>\n

import java.io.IOException;
import java.util.ArrayList;<\/p>\n

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;<\/p>\n

public class HbaseDao {<\/p>\n

\t
\t@Test
\tpublic void insertTest() throws Exception{

\t\t\u3000\u3000Configuration conf = HBaseConfiguration.create();
\t\t\u3000\u3000conf.set(\"hbase.zookeeper.quorum\", \"weekend05:2181,weekend06:2181,weekend07:2181\");

\t\t\u3000\u3000HTable nvshen = new HTable(conf, \"nvshen\");

\t\t\u3000\u3000Put name = new Put(Bytes.toBytes(\"rk0001\"));
\t\t\u3000\u3000name.add(Bytes.toBytes(\"base_info\"), Bytes.toBytes(\"name\"), Bytes.toBytes(\"angelababy\"));

\t\t\u3000\u3000Put age = new Put(Bytes.toBytes(\"rk0001\"));
\t\t\u3000\u3000age.add(Bytes.toBytes(\"base_info\"), Bytes.toBytes(\"age\"), Bytes.toBytes(18));

\t\t\u3000\u3000ArrayList<Put> puts = new ArrayList<>();
\t\t\u3000\u3000puts.add(name);
\t\t\u3000\u3000puts.add(age);

\t\t\u3000\u3000nvshen.put(puts);

\t}




\tpublic static void main(String[] args) throws Exception {

\t\t\u3000\u3000Configuration conf = HBaseConfiguration.create();
\t\t\u3000\u3000conf.set(\"hbase.zookeeper.quorum\", \"weekend05:2181,weekend06:2181,weekend07:2181\");

\t\t\u3000\u3000HBaseAdmin admin = new HBaseAdmin(conf);

\t\t\u3000\u3000TableName name = TableName.valueOf(\"nvshen\");


\t\t\u3000\u3000HTableDescriptor desc = new HTableDescriptor(name);


\t\t\u3000\u3000HColumnDescriptor base_info = new HColumnDescriptor(\"base_info\");
\t\t\u3000\u3000HColumnDescriptor extra_info = new HColumnDescriptor(\"extra_info\");
\t\t\u3000\u3000base_info.setMaxVersions(5);

\t\t\u3000\u3000desc.addFamily(base_info);
\t\t\u3000\u3000desc.addFamily(extra_info);

\t\t\u3000\u3000admin.createTable(desc);


\t\u3000\u3000}



}<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

<\/p>\n

\u6216\u8005HbaseDemo.java<\/p>\n

\"\"<\/p>\n

package cn.itcast.bigdata.hbase;<\/p>\n

import java.io.IOException;
import java.util.ArrayList;<\/p>\n

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;<\/p>\n

public class HbaseDemo {<\/p>\n

\t
\t\u3000\u3000@Test
\t\u3000\u3000public void insertTest() throws Exception{

\t\t\u3000\u3000\u3000\u3000Configuration conf = HBaseConfiguration.create();
\t\t\u3000\u3000\u3000\u3000conf.set(\"hbase.zookeeper.quorum\", \"weekend05:2181,weekend06:2181,weekend07:2181\");

\t\t\u3000\u3000\u3000\u3000HTable nvshen = new HTable(conf, \"nvshen\");

\t\t\u3000\u3000\u3000\u3000Put name = new Put(Bytes.toBytes(\"rk0001\"));
\t\t\u3000\u3000\u3000\u3000name.add(Bytes.toBytes(\"base_info\"), Bytes.toBytes(\"name\"), Bytes.toBytes(\"angelababy\"));

\t\t\u3000\u3000\u3000\u3000Put age = new Put(Bytes.toBytes(\"rk0001\"));
\t\t\u3000\u3000\u3000\u3000age.add(Bytes.toBytes(\"base_info\"), Bytes.toBytes(\"age\"), Bytes.toBytes(18));

\t\t\u3000\u3000\u3000\u3000ArrayList<Put> puts = new ArrayList<>();
\t\t\u3000\u3000\u3000\u3000puts.add(name);
\t\t\u3000\u3000\u3000\u3000puts.add(age);

\t\t\u3000\u3000\u3000\u3000nvshen.put(puts);

\t}




\tpublic static void main(String[] args) throws Exception {

\t\t\u3000\u3000Configuration conf = HBaseConfiguration.create();
\t\t\u3000\u3000conf.set(\"hbase.zookeeper.quorum\", \"weekend05:2181,weekend06:2181,weekend07:2181\");

\t\t\u3000\u3000HBaseAdmin admin = new HBaseAdmin(conf);

\t\t\u3000\u3000TableName name = TableName.valueOf(\"nvshen\");


\t\t\u3000\u3000HTableDescriptor desc = new HTableDescriptor(name);


\t\t\u3000\u3000HColumnDescriptor base_info = new HColumnDescriptor(\"base_info\");
\t\t\u3000\u3000HColumnDescriptor extra_info = new HColumnDescriptor(\"extra_info\");
\t\t\u3000\u3000base_info.setMaxVersions(5);

\t\t\u3000\u3000desc.addFamily(base_info);
\t\t\u3000\u3000desc.addFamily(extra_info);

\t\t\u3000\u3000admin.createTable(desc);


\t\u3000\u3000}



}<\/p>\n

<\/p><\/div> <\/div>\n <\/div>","orderid":"0","title":"5 hbase-shell +   hbase\u7684java api","smalltitle":"","mid":"0","fname":"HBase","special_id":"0","bak_id":"0","info":"0","hits":"449","pages":"1","comments":"0","posttime":"2019-05-15 13:35:02","list":"1557898502","username":"admin","author":"","copyfrom":"","copyfromurl":"","titlecolor":"","fonttype":"0","titleicon":"0","picurl":"https:\/\/www.cppentry.com\/upload_files\/","ispic":"0","yz":"1","yzer":"","yztime":"0","levels":"0","levelstime":"0","keywords":"hbase-shell<\/A>  <\/A> hbase<\/A> java<\/A> api<\/A>","jumpurl":"","iframeurl":"","style":"","template":"a:3:{s:4:\"foot\";s:0:\"\";s:8:\"bencandy\";s:0:\"\";s:4:\"head\";s:0:\"\";}","target":"0","ip":"47.106.78.186","lastfid":"0","money":"0","buyuser":"","passwd":"","allowdown":"","allowview":"","editer":"","edittime":"0","begintime":"0","endtime":"0","description":" \u672c\u535a\u6587\u7684\u4e3b\u8981\u5185\u5bb9\u6709\u3000\u3000.HBase\u7684\u5355\u673a\u6a21\u5f0f\uff081\u8282\u70b9\uff09\u5b89\u88c5\u3000\u3000.HBase\u7684\u5355\u673a\u6a21\u5f0f\uff081\u8282\u70b9\uff09\u7684\u542f\u52a8\u3000\u3000.HBase\u7684\u4f2a\u5206\u5e03\u6a21\u5f0f\uff081\u8282\u70b9\uff09\u5b89\u88c5\u3000\u3000 .HBase\u7684\u4f2a\u5206\u5e03\u6a21\u5f0f\uff081\u8282\u70b9\uff09\u7684\u542f\u52a8\u3000\u3000 .HBase\u7684\u5206\u5e03\u6a21\u5f0f\uff083\u30015\u8282\u70b9\uff09\u5b89\u88c5\u3000 .HBase\u7684\u5206..","lastview":"1714043971","digg_num":"0","digg_time":"0","forbidcomment":"0","ifvote":"0","heart":"","htmlname":"","city_id":"0"},"page":"1"}