A Java example of connecting to an HBase 1.4.0 cluster for create, read, update, and delete (CRUD) operations
2018-12-16 09:44:00
Copyright notice: this is an original post by the author and may not be reproduced without permission. https://blog.csdn.net/nmgrd/article/details/79038080

1. The project structure of this example is shown in the figure below:


2. The hbase-site.xml configuration file used in this example can simply be copied from the file of the same name on the HBase cluster. Its contents are as follows:

<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-->
<configuration>
  <property>  
    <name>hbase.rootdir</name>  
    <value>hdfs://node1:9000/hbase</value>
  </property>  
  <property>  
     <name>hbase.cluster.distributed</name>  
     <value>true</value>  
  </property>  
  <property>  
      <name>hbase.master</name>  
      <value>node1:60000</value>
  </property>  
   <property>  
    <name>hbase.zookeeper.property.dataDir</name>  
    <value>/usr/java/zookeeper3.4.10/temp</value>  
  </property>  
  <property>  
    <name>hbase.zookeeper.quorum</name>  
    <value>node1,node2,node3</value>
  </property>  
  <property>  
    <name>hbase.zookeeper.property.clientPort</name>  
    <value>2181</value>  
  </property>  
</configuration>
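
Note that when hbase-site.xml sits on the client's classpath (for a Maven project, typically under src/main/resources), HBaseConfiguration.create() loads it automatically, so a connection can be opened without any explicit conf.set(...) calls like the ones in the code below. A minimal sketch, assuming the file has been copied into src/main/resources (the class name ConnectFromClasspath is only illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class ConnectFromClasspath {
    public static void main(String[] args) throws Exception {
        // create() reads hbase-default.xml plus any hbase-site.xml found on the classpath
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf)) {
            System.out.println("connected: " + !connection.isClosed());
        }
    }
}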


3. The node1, node2, and node3 referenced in hbase-site.xml are the cluster nodes with IP addresses 192.168.209.129, 192.168.209.130, and 192.168.209.131. For this Java example to reach the cluster from Windows, edit the hosts file (located at C:\Windows\System32\drivers\etc) and append the following three lines:

192.168.209.129 node1
192.168.209.130 node2
192.168.209.131 node3
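
Before running the example, it can help to confirm that these hostnames really resolve from Windows. The short sketch below is not part of the original project; it is just a hypothetical helper class that prints what each hostname resolves to:

import java.net.InetAddress;

public class ResolveCheck {
    public static void main(String[] args) throws Exception {
        // Each hostname should print the IP address configured in the hosts file
        for (String host : new String[]{"node1", "node2", "node3"}) {
            System.out.println(host + " -> " + InetAddress.getByName(host).getHostAddress());
        }
    }
}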

4. The Java code for this example is as follows:

package com.hbase.core;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class HbaseDemo {

    private static Configuration conf = HBaseConfiguration.create();
    private static Admin admin;


    static {
//        conf.set("hbase.rootdir", "hdfs://192.168.209.129:9000/hbase");
        conf.set("hbase.rootdir", "hdfs://node1:9000/hbase");
        // Set the ZooKeeper quorum; IP addresses (commented out below) work as well as hostnames
//        conf.set("hbase.zookeeper.quorum", "192.168.209.129,192.168.209.130,192.168.209.131");
        conf.set("hbase.zookeeper.quorum", "node1,node2,node3");

        try {
            Connection connection = ConnectionFactory.createConnection(conf);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }


    /**
     * Create a table; several column families can be created at the same time.
     *
     * @param tableName    name of the table to create
     * @param columnFamily one or more column family names
     */
    public void createTable(String tableName, String... columnFamily) {
        TableName tableNameObj = TableName.valueOf(tableName);
        try {
            if (admin.tableExists(tableNameObj)) {
                System.out.println("Table : " + tableName + " already exists !");
            } else {
                HTableDescriptor td = new HTableDescriptor(tableNameObj);
                int len = columnFamily.length;
                for (int i = 0; i < len; i++) {
                    HColumnDescriptor family = new HColumnDescriptor(columnFamily[i]);
                    td.addFamily(family);
                }
                admin.createTable(td);
                System.out.println(tableName + " 表创建成功!");
            }
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println(tableName + " 表创建失败!");
        }
    }

    @Test
    public void testCreateTable() {
        createTable("cross_history", "carinfo", "parkInfo", "deviceInfo");
    }

    public void delTable(String tableName) {
        TableName tableNameObj = TableName.valueOf(tableName);
        try {
            if (admin.tableExists(tableNameObj)) {
                admin.disableTable(tableNameObj);
                admin.deleteTable(tableNameObj);
                System.out.println("Table " + tableName + " deleted successfully!");
            } else {
                System.out.println("Table " + tableName + " does not exist!");
            }
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("Failed to delete table " + tableName + "!");
        }
    }

    @Test
    public void testDelTable() {
        delTable("cross_history");
    }

    public void insertRecord(String tableName, String rowKey, String columnFamily, String qualifier, String value) {
        try {
            Connection connection = ConnectionFactory.createConnection(conf);
            Table table = connection.getTable(TableName.valueOf(tableName));
            Put put = new Put(rowKey.getBytes());

            put.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes(qualifier), Bytes.toBytes(value));
            table.put(put);
            table.close();
            connection.close();
            System.out.println("Inserted data into table " + tableName + " successfully!");
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("Failed to insert data into table " + tableName + "!");
        }
    }

    @Test
    public void testInsertRecord() {
        insertRecord("cross_history", "001", "carinfo", "plateNo", "浙A12345");
        insertRecord("cross_history", "002", "carinfo", "plateNo", "浙A12345");
        insertRecord("cross_history", "003", "carinfo", "plateNo", "浙A12345");
        insertRecord("cross_history", "001", "parkInfo", "parkName", "中兴花园");
        insertRecord("cross_history", "002", "parkInfo", "parkName", "中兴花园");
        insertRecord("cross_history", "003", "parkInfo", "parkName", "中兴花园");
        insertRecord("cross_history", "001", "deviceInfo", "deviceInfo", "道闸");
        insertRecord("cross_history", "002", "deviceInfo", "deviceInfo", "道闸");
        insertRecord("cross_history", "003", "deviceInfo", "deviceInfo", "道闸");
    }

    public void deleteRecord(String tableName, String rowKey) {
        try {
            Connection connection = ConnectionFactory.createConnection(conf);
            Table table = connection.getTable(TableName.valueOf(tableName));
            Delete del = new Delete(rowKey.getBytes());
            table.delete(del);
            table.close();
            connection.close();
            System.out.println("Deleted row from table " + tableName + " successfully!");
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("Failed to delete row from table " + tableName + "!");
        }
    }

    @Test
    public void testDeleteRecord() {
        deleteRecord("cross_history", "001");
    }

    public Result getOneRecord(String tableName, String rowKey) {
        try {
            Connection connection = ConnectionFactory.createConnection(conf);
            Table table = connection.getTable(TableName.valueOf(tableName));
            Get get = new Get(rowKey.getBytes());
            Result rs = table.get(get);
            System.out.println(tableName + " 表获取数据成功!");


            System.out.println("rowkey为:" + rowKey);
            List<Cell> cells = rs.listCells();
            if (cells != null) {
                for (Cell cell : cells) {
                    System.out.println(new String(cell.getFamily()) + " : " + new String(cell.getQualifier()) + " : " + new String(cell.getValue()));
                }
            }

            return rs;
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }

    }

    @Test
    public void testGetOneRecord() {
        getOneRecord("cross_history", "002");
    }

    public List<Result> getAll(String tableName) {
        try {
            Connection connection = ConnectionFactory.createConnection(conf);
            Table table = connection.getTable(TableName.valueOf(tableName));
            Scan scan = new Scan();
            ResultScanner scanner = table.getScanner(scan);
            List<Result> list = new ArrayList<Result>();
            for (Result r : scanner) {
                list.add(r);
            }
            scanner.close();
            System.out.println(tableName + " 表获取所有记录成功!");
            return list;
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    @Test
    public void testGetAll() {
        System.out.println(getAll("cross_history"));
    }

    // Create a table (static variant with a single column family)
    public static void createTable(String tablename, String columnFamily) throws Exception {
        Connection connection = ConnectionFactory.createConnection(conf);
        Admin admin = connection.getAdmin();

        TableName tableNameObj = TableName.valueOf(tablename);

        if (admin.tableExists(tableNameObj)) {
            System.out.println("Table exists!");
            System.exit(0);
        } else {
            HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(tablename));
            tableDesc.addFamily(new HColumnDescriptor(columnFamily));
            admin.createTable(tableDesc);
            System.out.println("create table success!");
        }
        admin.close();
        connection.close();
    }

    // Delete a table
    public static void deleteTable(String tableName) {
        try {
            Connection connection = ConnectionFactory.createConnection(conf);
            Admin admin = connection.getAdmin();
            TableName table = TableName.valueOf(tableName);
            admin.disableTable(table);
            admin.deleteTable(table);
            System.out.println("delete table " + tableName + " ok.");
        } catch (IOException e) {
            e.printStackTrace();
        }

    }

    // Insert a single row
    public static void addRecord(String tableName, String rowKey, String family, String qualifier, String value) {
        try {
            Connection connection = ConnectionFactory.createConnection(conf);
            Table table = connection.getTable(TableName.valueOf(tableName));
            Put put = new Put(Bytes.toBytes(rowKey));
            put.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(value));
            table.put(put);
            table.close();
            connection.close();
            System.out.println("insert record " + rowKey + " to table " + tableName + " ok.");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws Exception {
        HbaseDemo.createTable("testTb", "info");
        HbaseDemo.addRecord("testTb", "001", "info", "name", "zhangsan");
        HbaseDemo.addRecord("testTb", "001", "info", "age", "20");
        //HbaseDemo.deleteTable("testTb");
    }


}
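
One thing to be aware of in the code above: each data-access method opens a new Connection per call, and getOneRecord/getAll never close the Table or Connection. An HBase Connection is heavyweight and intended to be created once and shared. Purely as a sketch of tidier resource handling (not the original author's code), insertRecord could also be written with try-with-resources so the Table and Connection are always released:

    public void insertRecord(String tableName, String rowKey, String columnFamily, String qualifier, String value) {
        // try-with-resources closes the Connection and Table even if the put fails
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf(tableName))) {
            Put put = new Put(Bytes.toBytes(rowKey));
            put.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes(qualifier), Bytes.toBytes(value));
            table.put(put);
            System.out.println("Inserted data into table " + tableName + " successfully!");
        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("Failed to insert data into table " + tableName + "!");
        }
    }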

5. The Maven pom.xml for this example is configured as follows:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com</groupId>
    <artifactId>hbase</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <junit.version>4.11</junit.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>1.4.0</version>
        </dependency>

        <!-- Unit testing -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>${junit.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.7.4</version>
        </dependency>

        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.8</version>
            <scope>system</scope>
            <systemPath>C:\Program Files\Java\jdk1.8.0_152\lib\tools.jar</systemPath>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <configuration>
                    <skip>true</skip>
                </configuration>
            </plugin>
        </plugins>
        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-eclipse-plugin</artifactId>
                    <version>2.9</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-surefire-plugin</artifactId>
                    <version>2.10</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-jar-plugin</artifactId>
                    <version>2.3.2</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-source-plugin</artifactId>
                    <version>2.1.2</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-compiler-plugin</artifactId>
                    <version>3.1</version>
                    <configuration>
                        <source>1.7</source>
                        <target>1.7</target>
                        <encoding>UTF-8</encoding>
                    </configuration>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-war-plugin</artifactId>
                    <version>2.0.2</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-deploy-plugin</artifactId>
                    <version>2.7</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-dependency-plugin</artifactId>
                    <version>2.0</version>
                </plugin>
            </plugins>
        </pluginManagement>
    </build>

</project>

