HBase Java API

JAR dependencies

The client code needs two Maven artifacts: hadoop-hdfs and hbase-client.

```xml
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.7.4</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <version>1.2.2</version>
</dependency>
```
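
The snippets below share a `conf` object. A minimal sketch of how it can be built, assuming a local ZooKeeper quorum on the default client port (the class name and both property values are placeholders; point them at your own cluster):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class HBaseDemo {
    // Shared client configuration used by the DDL/DML examples that follow.
    static Configuration conf = HBaseConfiguration.create();
    static {
        // Placeholder values: point these at your own ZooKeeper quorum.
        conf.set("hbase.zookeeper.quorum", "localhost");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
    }
}
```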

DDL operations: basic workflow

  1. Create a connection
    Connection conn = ConnectionFactory.createConnection(conf);
  2. Get an Admin instance (org.apache.hadoop.hbase.client.Admin)
    Admin admin = conn.getAdmin();
  3. Assemble the table metadata with an HTableDescriptor
    HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(tablename));
    tableDesc.addFamily(new HColumnDescriptor(columnFamily));
  4. Run the DDL operation through the Admin, using the HTableDescriptor
    admin.createTable(tableDesc);
  5. Release resources
    admin.close();
    conn.close();
A complete example:

```java
public static void createTable(String tablename, String columnFamily) throws Exception {
    Connection conn = ConnectionFactory.createConnection(conf);
    Admin admin = conn.getAdmin();
    TableName tableNameObj = TableName.valueOf(tablename);
    if (admin.tableExists(tableNameObj)) {
        System.out.println("Table exists!");
    } else {
        // Describe the table: its name plus one column family.
        HTableDescriptor tableDesc = new HTableDescriptor(tableNameObj);
        tableDesc.addFamily(new HColumnDescriptor(columnFamily));
        admin.createTable(tableDesc);
        System.out.println("create table success!");
    }
    // Release resources in either case.
    admin.close();
    conn.close();
}
```
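
Deleting a table follows the same Admin-based flow. A minimal sketch, assuming the same shared `conf` (deleteTable is an illustrative name; HBase requires a table to be disabled before it can be dropped):

```java
public static void deleteTable(String tablename) throws Exception {
    Connection conn = ConnectionFactory.createConnection(conf);
    Admin admin = conn.getAdmin();
    TableName tableNameObj = TableName.valueOf(tablename);
    if (admin.tableExists(tableNameObj)) {
        // A table must be disabled before it can be deleted.
        admin.disableTable(tableNameObj);
        admin.deleteTable(tableNameObj);
        System.out.println("delete table success!");
    } else {
        System.out.println("Table does not exist!");
    }
    admin.close();
    conn.close();
}
```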

DML operations: basic workflow

  1. Create a connection
    Connection connection = ConnectionFactory.createConnection(conf);
  2. Get a Table (org.apache.hadoop.hbase.client.Table)
    Table table = connection.getTable(TableName.valueOf(tableName));
  3. Assemble a Put (or Get) object; call addColumn once per column to write
    Put put = new Put(Bytes.toBytes(rowKey));
    put.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(value));
  4. Execute it through the Table
    table.put(put);
  5. Release resources
    table.close();
    connection.close();
A complete example:

```java
public static void addRecord(String tableName, String rowKey, String family, String qualifier, String value) {
    // try-with-resources closes the table and connection even if the put fails.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf(tableName))) {
        Put put = new Put(Bytes.toBytes(rowKey));
        // One cell per addColumn call; repeat for additional columns.
        put.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(value));
        table.put(put);
        System.out.println("insert record " + rowKey + " to table " + tableName + " ok.");
    } catch (IOException e) {
        e.printStackTrace();
    }
}
```
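
Reading a row back uses the same Table-based flow, with a Get in place of the Put. A minimal sketch, assuming the same shared `conf` (getRecord is an illustrative name, not part of the listings above):

```java
public static void getRecord(String tableName, String rowKey) {
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf(tableName))) {
        Get get = new Get(Bytes.toBytes(rowKey));
        Result result = table.get(get);
        // Print every cell of the row as family:qualifier=value.
        for (Cell cell : result.rawCells()) {
            System.out.println(Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                    + Bytes.toString(CellUtil.cloneQualifier(cell)) + "="
                    + Bytes.toString(CellUtil.cloneValue(cell)));
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
```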

Resources

[Windows + Eclipse + Maven + HBase 1.2.4 development environment setup](http://blog.csdn.net/chengyuqiang/article/details/69568496)