1. Read data from HBase table 1 and store the aggregated results in table 2
Create the corresponding table 1 in HBase:
create 'hello','cf'
put 'hello','1','cf:hui','hello world'
put 'hello','2','cf:hui','hello hadoop'
put 'hello','3','cf:hui','hello hive'
put 'hello','4','cf:hui','hello hadoop'
put 'hello','5','cf:hui','hello world'
put 'hello','6','cf:hui','hello world'
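Before running the job, a quick scan 'hello' in the HBase shell is a handy way to confirm that all six rows landed in the source table.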
Java code:
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

public class HBaseToHbase {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        String hbaseTableName1 = "hello";   // source table
        String hbaseTableName2 = "mytb2";   // target table for the counts
        prepareTB2(hbaseTableName2);        // (re)create the target table

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(HBaseToHbase.class);
        job.setJobName("mrreadwritehbase");

        // Full-table scan over the source table; a larger caching value reduces
        // RPC round trips, and block caching is disabled because a MapReduce
        // scan touches each block only once.
        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false);

        TableMapReduceUtil.initTableMapperJob(hbaseTableName1, scan, doMapper.class, Text.class, IntWritable.class, job);
        TableMapReduceUtil.initTableReducerJob(hbaseTableName2, doReducer.class, job);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    // Mapper: emit each row's cell value (one line of text) with a count of 1.
    public static class doMapper extends TableMapper<Text, IntWritable> {
        private final static IntWritable one = new IntWritable(1);

        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            // value.list() is the old (pre-0.96) API; newer clients use value.listCells().
            String rowValue = Bytes.toString(value.list().get(0).getValue());
            context.write(new Text(rowValue), one);
        }
    }

    // Reducer: sum the counts for each line and write a Put to the target table.
    public static class doReducer extends TableReducer<Text, IntWritable, NullWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            System.out.println(key.toString());
            int sum = 0;
            Iterator<IntWritable> haha = values.iterator();
            while (haha.hasNext()) {
                sum += haha.next().get();
            }
            // Use the line itself as the row key; store the count as a string.
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.add(Bytes.toBytes("mycolumnfamily"), Bytes.toBytes("count"), Bytes.toBytes(String.valueOf(sum)));
            context.write(NullWritable.get(), put);
        }
    }

    // Drop and recreate the target table so every run starts from a clean slate.
    public static void prepareTB2(String hbaseTableName) throws IOException {
        HTableDescriptor tableDesc = new HTableDescriptor(hbaseTableName);
        HColumnDescriptor columnDesc = new HColumnDescriptor("mycolumnfamily");
        tableDesc.addFamily(columnDesc);
        Configuration cfg = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(cfg);
        if (admin.tableExists(hbaseTableName)) {
            System.out.println("Table exists, trying drop and create!");
            admin.disableTable(hbaseTableName);
            admin.deleteTable(hbaseTableName);
            admin.createTable(tableDesc);
        } else {
            System.out.println("create table: " + hbaseTableName);
            admin.createTable(tableDesc);
        }
    }
}
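The prepareTB2 helper above relies on HBaseAdmin and the String-based HTableDescriptor constructor, which were deprecated and eventually removed from the HBase client. As a minimal sketch (assuming the HBase 1.x client API is on the classpath; the Connection, Admin, and ConnectionFactory classes below come from that API, not from the original post), the same helper could be written as:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class PrepareTB2Sketch {
    // Drop and recreate the target table, as prepareTB2 does above,
    // but through the Connection/Admin API introduced in HBase 1.0.
    public static void prepareTB2(String hbaseTableName) throws IOException {
        TableName tn = TableName.valueOf(hbaseTableName);
        HTableDescriptor tableDesc = new HTableDescriptor(tn);
        tableDesc.addFamily(new HColumnDescriptor("mycolumnfamily"));
        Configuration cfg = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(cfg);
             Admin admin = conn.getAdmin()) {
            if (admin.tableExists(tn)) {
                admin.disableTable(tn);
                admin.deleteTable(tn);
            }
            admin.createTable(tableDesc);
        }
    }
}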
Compile and run the code on Linux:
[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/javac HBaseToHbase.java
[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/jar cvf xx.jar HBaseToHbase*class
[hadoop@h71 q1]$ hadoop jar xx.jar HBaseToHbase
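Note: depending on the environment, the hadoop jar step may fail with a NoClassDefFoundError for HBase classes; in that case the HBase jars usually need to be added to Hadoop's classpath first, for example with export HADOOP_CLASSPATH=$(hbase classpath). This step is an assumption about the setup and is not shown in the original post.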
Check the mytb2 table:
hbase(main):009:0> scan 'mytb2'
ROW                 COLUMN+CELL
 hello hadoop       column=mycolumnfamily:count, timestamp=1489817182454, value=2
 hello hive         column=mycolumnfamily:count, timestamp=1489817182454, value=1
 hello world        column=mycolumnfamily:count, timestamp=1489817182454, value=3
3 row(s) in 0.0260 seconds
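Because the reducer stores each count via Bytes.toBytes(String.valueOf(sum)), the values in mytb2 are strings, not binary integers, and must be decoded with Bytes.toString rather than Bytes.toInt. A minimal read-back sketch (hypothetical; the class name ReadCountSketch and the use of the HBase 1.x Connection API are assumptions, not part of the original post):

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ReadCountSketch {
    public static void main(String[] args) throws IOException {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = conn.getTable(TableName.valueOf("mytb2"))) {
            // Each output row is keyed by the original line of text.
            Result r = table.get(new Get(Bytes.toBytes("hello world")));
            byte[] raw = r.getValue(Bytes.toBytes("mycolumnfamily"), Bytes.toBytes("count"));
            System.out.println("hello world -> " + Bytes.toString(raw)); // prints 3
        }
    }
}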
Author: 清风_d587
Link: https://www.jianshu.com/p/01411078c449