package hbaseDEMO;
import
org.apache.hadoop.conf.Configuration;
import
org.apache.hadoop.hbase.HBaseConfiguration;
import
org.apache.hadoop.hbase.HColumnDescriptor;
import
org.apache.hadoop.hbase.HTableDescriptor;
import
org.apache.hadoop.hbase.KeyValue;
import
org.apache.hadoop.hbase.client.HBaseAdmin;
import
org.apache.hadoop.hbase.client.HTable;
import
org.apache.hadoop.hbase.client.Result;
import
org.apache.hadoop.hbase.client.ResultScanner;
import
org.apache.hadoop.hbase.client.Scan;
import
org.apache.hadoop.hbase.io.BatchUpdate;
public class HBaseDEMO {
//定义配置对象HBaseConfiguration
// Shared HBase configuration for all table operations in this class,
// built once when the class is loaded.
static HBaseConfiguration cfg = null;

static {
    // Wrap a fresh Hadoop Configuration in an HBaseConfiguration
    // (the constructor form used by this legacy HBase client API).
    cfg = new HBaseConfiguration(new Configuration());
}
//创建一张表,指定表名,列族
/**
 * Creates a table with the given name and a single column family.
 * If the table already exists, prints a message and exits the JVM
 * (preserved from the original; NOTE(review): System.exit in a utility
 * method is harsh — consider returning instead).
 *
 * Fixes two defects in the original:
 *  1. The "exists" branch printed "不存在!" (does not exist) — inverted message.
 *  2. The create branch never called admin.createTable(tableDesc), so the
 *     table was never actually created despite the success message.
 *
 * @param tableName    name of the table to create
 * @param columnFarily name of the single column family to attach
 * @throws Exception if the HBase master cannot be reached or creation fails
 */
public static void CreateTable(String tableName, String columnFarily) throws Exception {
    HBaseAdmin admin = new HBaseAdmin(cfg);
    if (admin.tableExists(tableName)) {
        // Table already exists — report and stop (original behavior, message corrected).
        System.out.println(tableName + "已存在!");
        System.exit(0);
    } else {
        HTableDescriptor tableDesc = new HTableDescriptor(tableName);
        tableDesc.addFamily(new HColumnDescriptor(columnFarily + ":"));
        // The original built the descriptor but forgot to actually create the table.
        admin.createTable(tableDesc);
        System.out.println("创建表成功!");
    }
}
//添加数据,通过HTable。和BatchUpdate为已经存在的表添加数据data
public
static void addData(String tableName,String row,String
columnFamily,String column,String data)throws
Exception{
HTable table = new
HTable(cfg,tableName);
BatchUpdate update = new
BatchUpdate(row);
update.put(columnFamily+":"+column,
data.getBytes());
table.commit(update);