Spring Boot + HBase: creating an HBase table

1. Add dependencies

<properties>
    <hbase.version>2.2.4</hbase.version>
</properties>

<dependencies>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-shaded-client</artifactId>
        <version>${hbase.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-lang3</artifactId>
        <version>3.4</version>
    </dependency>
    <dependency>
        <groupId>commons-io</groupId>
        <artifactId>commons-io</artifactId>
        <version>2.4</version>
    </dependency>
    <dependency>
        <groupId>commons-beanutils</groupId>
        <artifactId>commons-beanutils</artifactId>
        <version>1.9.4</version>
    </dependency>
</dependencies>

2. Configuration

# database - hbase
datasource.hbase.zookeeper.quorum=192.169.9.225
datasource.hbase.zookeeper.port=22181
datasource.hbase.zookeeper.znode.parent=
datasource.hbase.table.myHbase=myHbase

Pre-splitting the table is a good way to avoid write hotspots. Without pre-splits, as writes keep coming in, HBase will split regions on its own, which consumes cluster resources and in turn slows the writes down.

# Create the HBase table; the data written below is uids 1-500000 in a loop
create 'myHbase', 'info', SPLITS => ['10','20','30','40']
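
If you prefer to create the pre-split table from Java instead of the shell, the same layout can be expressed through the HBase Admin API. This is only a sketch of mine, not part of the original setup: the class and method names are made up, and it assumes you already have a Connection like the one built by the HBaseConfig class below.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class CreateTableSketch {

    // Creates 'myHbase' with the 'info' family and the same four split keys
    // as the shell command above, if the table does not exist yet.
    public static void createMyHbase(Connection connection) throws IOException {
        try (Admin admin = connection.getAdmin()) {
            TableName tableName = TableName.valueOf("myHbase");
            if (admin.tableExists(tableName)) {
                return;
            }
            byte[][] splitKeys = {
                    Bytes.toBytes("10"),
                    Bytes.toBytes("20"),
                    Bytes.toBytes("30"),
                    Bytes.toBytes("40")
            };
            admin.createTable(
                    TableDescriptorBuilder.newBuilder(tableName)
                            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("info"))
                            .build(),
                    splitKeys);
        }
    }
}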

3. The HBaseConfig class

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.io.IOException;

@Configuration
public class HBaseConfig {

    @Value("${datasource.hbase.zookeeper.quorum}")
    private String zookeeper;

    @Value("${datasource.hbase.zookeeper.znode.parent}")
    private String parent;

    @Value("${datasource.hbase.zookeeper.port}")
    private String port;

    @Value("${datasource.hbase.table.myHbase}")
    private String tableName;

    /** Builds an HBase Connection from the ZooKeeper settings above. */
    public Connection getConnection() throws IOException {
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", zookeeper);
        config.set("hbase.zookeeper.property.clientPort", port);
        if (parent != null && !"".equals(parent)) {
            config.set("zookeeper.znode.parent", parent);
        }
        return ConnectionFactory.createConnection(config);
    }

    /** Exposes the configured HBase table as a Spring bean named "hbaseTable". */
    @Bean(name = "hbaseTable")
    public Table getHbaseTable() throws IOException {
        Connection connection = getConnection();
        return connection.getTable(TableName.valueOf(tableName));
    }

}
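
Not in the original post, but a quick way to sanity-check this wiring is to read one row back through the same Table bean. A minimal sketch; the class and method names are mine, and the info:uid column matches the write path in section 4 below.

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class HBaseReadCheck {

    // Fetches info:uid for a single row key and returns it as a String,
    // or null if the row or column is missing.
    public static String readUid(Table hbaseTable, String rowKey) throws IOException {
        Get get = new Get(Bytes.toBytes(rowKey));
        Result result = hbaseTable.get(get);
        byte[] value = result.getValue(Bytes.toBytes("info"), Bytes.toBytes("uid"));
        return value == null ? null : Bytes.toString(value);
    }
}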

4. Usage

import com.test.dspriskadmin.dao.mysql.SdkUidMapper;
import lombok.val;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.io.IOException;
import java.util.*;

@Service
public class SdkUidService {

    @Autowired
    private SdkUidMapper sdkUidMapper;

    @Resource(name = "hbaseTable")
    private Table hbaseTable;

    public void insertSdkUidToHbase() {
        val sdkUIdList = sdkUidMapper.queryUidList();
        System.out.println(sdkUIdList);
        sdkUIdList.forEach(sdkUId -> {
            // The uid is used both as the row key and as the info:uid value.
            Put put = new Put(Bytes.toBytes(sdkUId.getUid().toString()));
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("uid"),
                    Bytes.toBytes(sdkUId.getUid().toString()));
            try {
                hbaseTable.put(put);
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
        // hbaseTable is a shared Spring bean, so it is not closed here;
        // closing it would make the next call to this method fail.
    }
}
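
The service above issues one put() per row, i.e. one RPC per record. Since the workload described in section 2 is a loop over uids 1-500000, batching the puts cuts the number of round trips considerably. Here is a minimal sketch of that idea; the class name, method name, and batchSize parameter are mine, and it reuses the same hbaseTable bean and info:uid column as above.

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class BatchPutSketch {

    // Writes uids 1..500000, sending them to HBase in batches of batchSize
    // instead of one RPC per row.
    public static void writeUids(Table hbaseTable, int batchSize) throws IOException {
        List<Put> batch = new ArrayList<>(batchSize);
        for (int uid = 1; uid <= 500000; uid++) {
            String rowKey = String.valueOf(uid);
            Put put = new Put(Bytes.toBytes(rowKey));
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("uid"), Bytes.toBytes(rowKey));
            batch.add(put);
            if (batch.size() >= batchSize) {
                hbaseTable.put(batch);   // one RPC for the whole batch
                batch.clear();
            }
        }
        if (!batch.isEmpty()) {
            hbaseTable.put(batch);       // flush any remainder
        }
    }
}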
