Spring Boot 集成 HBase(新建 HBase 表)
1.添加依赖
<hbase.version>2.2.4</hbase.version>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-client</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.4</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.9.4</version>
</dependency>
2.配置信息
# database - hbase
datasource.hbase.zookeeper.quorum=192.169.9.225
datasource.hbase.zookeeper.port=22181
datasource.hbase.zookeeper.znode.parent=
datasource.hbase.table.myHbase=myHbase
设置预分区,能很好地避免写入时的热点写问题;同时在不断写入的过程中,HBase 的 region 会自动进行 split,这会消耗 HBase 的资源,进而影响写入性能。
# 新建hbase表,我写入的数据是1-500000的循环写入
create 'myHbase','info' ,SPLITS=>['10','20','30','40']
3.hbaseConfig类
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.io.IOException;
@Configuration
@Configuration
public class HBaseConfig {

    @Value("${datasource.hbase.zookeeper.quorum}")
    private String zookeeper;

    @Value("${datasource.hbase.zookeeper.znode.parent}")
    private String parent;

    @Value("${datasource.hbase.zookeeper.port}")
    private String port;

    @Value("${datasource.hbase.table.myHbase}")
    private String tableName;

    /**
     * Builds the shared HBase {@link Connection}.
     *
     * <p>Declared as a {@code @Bean} with {@code destroyMethod = "close"} so
     * Spring creates it exactly once and closes it on context shutdown. The
     * original code created a brand-new Connection on every call and never
     * closed it — a resource leak, since an HBase Connection is heavyweight
     * (it owns ZooKeeper sessions and socket pools) and is designed to be
     * created once and shared. Because this is a {@code @Configuration}
     * class, internal calls to this method are proxied and also return the
     * singleton, so the external interface is unchanged.
     *
     * @return the singleton HBase connection
     * @throws IOException if the connection cannot be established
     */
    @Bean(destroyMethod = "close")
    public Connection getConnection() throws IOException {
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", zookeeper);
        config.set("hbase.zookeeper.property.clientPort", port);
        // The znode parent is optional; only override the default when configured.
        if (parent != null && !parent.isEmpty()) {
            config.set("zookeeper.znode.parent", parent);
        }
        return ConnectionFactory.createConnection(config);
    }

    /**
     * Exposes a handle to the configured table as the {@code hbaseTable} bean.
     *
     * <p>NOTE(review): per the HBase client Javadoc, {@link Table} is NOT
     * thread-safe for writes; sharing one instance across concurrent requests
     * is risky. The bean is kept for backward compatibility with existing
     * injection points — prefer {@code getConnection().getTable(...)} per
     * operation in new code.
     *
     * @return a Table handle for the configured table name
     * @throws IOException if the table handle cannot be obtained
     */
    @Bean(name = "hbaseTable")
    public Table getHbaseTable() throws IOException {
        // Goes through the proxied @Bean method, so the singleton Connection is reused.
        return getConnection().getTable(TableName.valueOf(tableName));
    }
}
4.使用
import com.test.dspriskadmin.dao.mysql.SdkUidMapper;
import lombok.val;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.io.IOException;
import java.util.*;
@Service
@Service
public class SdkUidService {

    @Autowired
    private SdkUidMapper sdkUidMapper;

    @Resource(name = "hbaseTable")
    private Table hbaseTable;

    /**
     * Loads every uid from MySQL and writes it into HBase, using the uid
     * string both as the row key and as the value of column {@code info:uid}.
     *
     * <p>Fixes over the original implementation:
     * <ul>
     *   <li>No longer closes the injected singleton {@code hbaseTable} — the
     *       original closed it at the end of the method, so every call after
     *       the first operated on a closed table and failed.</li>
     *   <li>Puts are sent as one batch via {@code Table#put(List)} instead of
     *       one RPC round-trip per row.</li>
     *   <li>Failures are propagated to the caller instead of being swallowed
     *       with {@code printStackTrace()}.</li>
     *   <li>Dropped the debug print of the full uid list (it could hold
     *       hundreds of thousands of entries).</li>
     * </ul>
     *
     * @throws IllegalStateException if the HBase write fails
     */
    public void insertSdkUidToHbase() {
        val sdkUidList = sdkUidMapper.queryUidList();
        List<Put> puts = new ArrayList<>(sdkUidList.size());
        for (val sdkUid : sdkUidList) {
            byte[] uid = Bytes.toBytes(sdkUid.getUid().toString());
            Put put = new Put(uid);
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("uid"), uid);
            puts.add(put);
        }
        try {
            hbaseTable.put(puts);
        } catch (IOException e) {
            throw new IllegalStateException("Failed to write uids to HBase table", e);
        }
    }
}
本站文章为和通数据库网友分享或者投稿,欢迎任何形式的转载,但请务必注明出处.
同时文章内容如有侵犯了您的权益,请联系QQ:970679559,我们会在尽快处理。