import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.IOException;
@Service
public class HBaseService {
    @Autowired
    private Configuration configuration;
    public void writeDataToHBase(String tableName, String rowKey, String columnFamily, String column, String value) throws IOException {
        // Build the HBase configuration from the injected Hadoop configuration
        Configuration hbaseConfig = HBaseConfiguration.create(configuration);
        // Open a connection to the cluster
        Connection connection = ConnectionFactory.createConnection(hbaseConfig);
        // Get a handle to the target table
        Table table = connection.getTable(TableName.valueOf(tableName));
        // Create a Put object for the given row key
        Put put = new Put(Bytes.toBytes(rowKey));
        // Add the column and its value
        put.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes(column), Bytes.toBytes(value));
        // Execute the Put operation
        table.put(put);
        // Release the table and the connection
        table.close();
        connection.close();
    }

    public void readDataFromHBase(String tableName, String rowKey) throws IOException {
        // Build the HBase configuration from the injected Hadoop configuration
        Configuration hbaseConfig = HBaseConfiguration.create(configuration);
        // Open a connection to the cluster
        Connection connection = ConnectionFactory.createConnection(hbaseConfig);
        // Get a handle to the target table
        Table table = connection.getTable(TableName.valueOf(tableName));
        // Create a Get object for the given row key
        Get get = new Get(Bytes.toBytes(rowKey));
        // Execute the Get operation
        Result result = table.get(get);
        // Print every cell in the result
        for (Cell cell : result.rawCells()) {
            System.out.println("Row: " + Bytes.toString(CellUtil.cloneRow(cell)));
            System.out.println("Column family: " + Bytes.toString(CellUtil.cloneFamily(cell)));
            System.out.println("Column: " + Bytes.toString(CellUtil.cloneQualifier(cell)));
            System.out.println("Value: " + Bytes.toString(CellUtil.cloneValue(cell)));
        }
        // Release the table and the connection
        table.close();
        connection.close();
    }
}
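The service autowires a Hadoop Configuration, so a matching bean has to be registered in the Spring context. Below is a minimal sketch of such a configuration class; the class name HBaseConfig and the ZooKeeper quorum/port values are assumptions, so adjust them to your cluster.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.springframework.context.annotation.Bean;

// Hypothetical @Configuration class that supplies the Hadoop Configuration
// injected into HBaseService above.
@org.springframework.context.annotation.Configuration
public class HBaseConfig {

    @Bean
    public Configuration hbaseConfiguration() {
        // HBaseConfiguration.create() also picks up hbase-site.xml from the classpath if present
        Configuration conf = HBaseConfiguration.create();
        // Assumed connection settings; replace with your cluster's ZooKeeper quorum and port
        conf.set("hbase.zookeeper.quorum", "localhost");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        return conf;
    }
}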