1. Maintaining tables with HBaseAdmin (creating and deleting tables)
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
public class CreateHbaseTable {
    public static void main(String[] args) throws Exception {
        String tbl = "ericni_test";
        // Connect through the cluster's ZooKeeper quorum
        Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", "xxxx");
        config.set("hbase.zookeeper.property.clientPort", "2181");
        HBaseAdmin admin = new HBaseAdmin(config);
        // List all existing tables
        HTableDescriptor[] tables = admin.listTables();
        for (HTableDescriptor t : tables) {
            System.out.println(t.getNameAsString());
        }
        if (admin.tableExists(tbl.getBytes("utf8"))) {
            System.out.println("table already exists!");
        } else {
            System.out.println("table does not exist! create now!");
            createTable(admin, tbl, new String[]{"info"});
        }
        admin.close();
    }

    public static void createTable(HBaseAdmin admin, String tableName, String[] familys) throws Exception {
        HTableDescriptor tableDesc = new HTableDescriptor(tableName);
        // One HColumnDescriptor per column family
        for (int i = 0; i < familys.length; i++) {
            tableDesc.addFamily(new HColumnDescriptor(familys[i]));
        }
        try {
            admin.createTable(tableDesc);
            System.out.println("create table " + tableName + " ok.");
        } catch (Exception err) {
            System.out.println("create table " + tableName + " failed. Error log is " + err);
            err.printStackTrace();
        }
    }

    public static void deleteTable(HBaseAdmin admin, String tableName) throws Exception {
        try {
            // A table must be disabled before it can be deleted
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
            System.out.println("delete table " + tableName + " ok.");
        } catch (Exception err) {
            System.out.println("delete table " + tableName + " failed. Error log is " + err);
            err.printStackTrace();
        }
    }
}
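The HBaseAdmin(Configuration) constructor used above is deprecated in newer client releases. As a minimal sketch (not from the original post, and assuming the same "ericni_test" table, "info" family, and imports for org.apache.hadoop.hbase.TableName and org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Admin}), the same create/delete flow with the Connection/Admin API available since HBase 1.0 could look like this:
// Hypothetical sketch of the same table maintenance with the newer Connection/Admin API
Connection connection = ConnectionFactory.createConnection(config);
Admin admin = connection.getAdmin();
TableName name = TableName.valueOf("ericni_test");
if (!admin.tableExists(name)) {
    HTableDescriptor desc = new HTableDescriptor(name);
    desc.addFamily(new HColumnDescriptor("info"));
    admin.createTable(desc);
} else {
    admin.disableTable(name);   // a table must be disabled before deletion
    admin.deleteTable(name);
}
admin.close();
connection.close();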
2. Writing data with Put
import java.io.File;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
public class LoadDataFromFile {
    public static void main(String[] args) throws Exception {
        String tbl = "ericni_test";
        String filename = "/tmp/click.log";
        File file = new File(filename);
        BufferedReader reader = new BufferedReader(new FileReader(file));
        Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", "10.100.90.203");
        config.set("hbase.zookeeper.property.clientPort", "2181");
        HTable table = new HTable(config, tbl);
        HBaseAdmin admin = new HBaseAdmin(config);
        if (admin.tableExists(tbl.getBytes("utf8"))) {
            System.out.println("table already exists!");
            try {
                try {
                    String tmpString = null;
                    int linex = 1;
                    while ((tmpString = reader.readLine()) != null) {
                        // Each log line is tab separated; the first five fields form the row key
                        String[] lines = tmpString.split("\\t");
                        StringBuffer sb = new StringBuffer();
                        String row = sb.append(lines[0]).append("_").append(lines[1]).append("_")
                                .append(lines[2]).append("_").append(lines[3]).append("_")
                                .append(lines[4]).toString();
                        // The last field is the value to store
                        String valuex = lines[lines.length - 1];
                        System.out.println(row);
                        linex++;
                        addRecord(table, tbl, row, "Stat", "Click_cnt", valuex);
                    }
                } catch (IOException err) {
                    err.printStackTrace();
                } finally {
                    try {
                        if (reader != null) reader.close();
                    } catch (IOException err) {
                        err.printStackTrace();
                    }
                }
            } catch (Exception err) {
                System.out.println("load data error");
                System.out.println("error log: " + err);
                err.printStackTrace();
            }
        } else {
            System.out.println("table does not exist!");
            System.exit(1);
        }
        table.close();
        admin.close();
    }

    public static void addRecord(HTable table, String tableName, String rowKey, String family, String qualifier, String value) throws Exception {
        Put put = new Put(Bytes.toBytes(rowKey));
        put.add(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(value));
        table.put(put);
        System.out.println("insert record " + rowKey + " to table " + tableName + " ok.");
    }
}
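The loop above calls table.put(put) once per input line, which issues roughly one RPC per row. For larger files it is usually cheaper to buffer several Puts and send them together. A minimal sketch of such a batched read loop, assuming it replaces the while loop in main above (reusing the existing reader and table, with java.util.List and java.util.ArrayList imported, and an arbitrary batch size of 1000):
// Buffer Puts and flush them in batches instead of one RPC per row
List<Put> batch = new ArrayList<Put>();
String tmpString;
while ((tmpString = reader.readLine()) != null) {
    String[] lines = tmpString.split("\\t");
    StringBuffer sb = new StringBuffer();
    String row = sb.append(lines[0]).append("_").append(lines[1]).append("_")
            .append(lines[2]).append("_").append(lines[3]).append("_")
            .append(lines[4]).toString();
    Put put = new Put(Bytes.toBytes(row));
    put.add(Bytes.toBytes("Stat"), Bytes.toBytes("Click_cnt"), Bytes.toBytes(lines[lines.length - 1]));
    batch.add(put);
    if (batch.size() >= 1000) {      // flush every 1000 rows (arbitrary batch size)
        table.put(batch);
        batch.clear();
    }
}
if (!batch.isEmpty()) {
    table.put(batch);                // flush the remainder
}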
3. Filter operations with Scan
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
public class FilterTest {
    private static Configuration config = null;
    private static HTable table;
    private static HTable table2;
    private static HTable table3;

    static {
        try {
            config = HBaseConfiguration.create();
            config.set("hbase.zookeeper.quorum", "xxxxx");
            config.set("hbase.zookeeper.property.clientPort", "2181");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Point query by row key
    public static void selectByRowKey(String tablename, String rowKey) throws IOException {
        table = new HTable(config, tablename);
        Get g = new Get(Bytes.toBytes(rowKey));
        Result r = table.get(g);
        for (KeyValue kv : r.raw()) {
            System.out.println("row   : " + new String(kv.getRow()));
            System.out.println("column: " + new String(kv.getFamily()) + ":" + new String(kv.getQualifier()));
            System.out.println("value : " + new String(kv.getValue()));
        }
    }

    // Point query by row key, restricted to one column
    public static void selectByRowKeyColumn(String tablename, String rowKey, String column, String qualifier) throws IOException {
        table2 = new HTable(config, tablename);
        Get g = new Get(Bytes.toBytes(rowKey));
        g.addColumn(Bytes.toBytes(column), qualifier.getBytes("utf8"));
        Result r = table2.get(g);
        for (KeyValue kv : r.raw()) {
            System.out.println("row   : " + new String(kv.getRow()));
            System.out.println("column: " + new String(kv.getFamily()) + ":" + new String(kv.getQualifier()));
            System.out.println("value : " + new String(kv.getValue()));
        }
    }

    // Scan with a FilterList of SingleColumnValueFilters; each entry in arr is "family,qualifier,value"
    public static void selectByFilter(String tablename, List<String> arr) throws IOException {
        table3 = new HTable(config, tablename);
        FilterList filterList = new FilterList();
        Scan s1 = new Scan();
        for (String v : arr) {
            String[] s = v.split(",");
            filterList.addFilter(new SingleColumnValueFilter(Bytes.toBytes(s[0]),
                    Bytes.toBytes(s[1]),
                    CompareOp.EQUAL, Bytes.toBytes(s[2])));
            s1.addColumn(Bytes.toBytes(s[0]), Bytes.toBytes(s[1]));
        }
        s1.setFilter(filterList);
        ResultScanner resultScanner = table3.getScanner(s1);
        for (Result rr = resultScanner.next(); rr != null; rr = resultScanner.next()) {
            for (KeyValue kv : rr.list()) {
                System.out.println("row   : " + new String(kv.getRow()));
                System.out.println("column: " + new String(kv.getFamily()) + ":" + new String(kv.getQualifier()));
                System.out.println("value : " + new String(kv.getValue()));
            }
        }
    }

    public static void main(String[] args) throws IOException {
        //selectByRowKey("ericni_test", "102_2.94_1400342400_00426_01132");
        //selectByRowKeyColumn("ericni_test", "102_2.94_1400342400_00426_01132", "Stat", "Click_cnt");
        List<String> arr = new ArrayList<String>();
        arr.add("Stat,Click_cnt,1");
        selectByFilter("ericni_test", arr);
    }
}
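Two properties of the filters used above are easy to overlook: a FilterList built with the no-argument constructor combines its filters with MUST_PASS_ALL (AND) semantics, and a SingleColumnValueFilter by default lets through rows that do not contain the tested column at all. A hedged variant of the filter construction in selectByFilter that switches to OR semantics and skips rows missing the column (reusing the same arr format and Scan s1) might look like this:
// OR semantics across conditions, and drop rows that lack the tested column
FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);  // default is MUST_PASS_ALL (AND)
for (String v : arr) {
    String[] s = v.split(",");
    SingleColumnValueFilter f = new SingleColumnValueFilter(
            Bytes.toBytes(s[0]), Bytes.toBytes(s[1]),
            CompareOp.EQUAL, Bytes.toBytes(s[2]));
    f.setFilterIfMissing(true);   // otherwise rows without this column are emitted as well
    filterList.addFilter(f);
}
s1.setFilter(filterList);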