2-1-6
作者:互联网
package task;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Loads a tab-separated GDP data file, (re)creates the HBase table
 * {@code bigdata6} with a single column family {@code gdp}, bulk-inserts the
 * rows, and finally scans the table back, printing every cell.
 *
 * <p>Expected input format: one record per line, fields separated by tabs:
 * rowkey, country (gj), region (dq), GDP value (gdp), percentage (bfb).
 * NOTE(review): 5 fields per line is assumed — confirm against the data file.
 */
public class DataStorage_Hbase {

    /** Path of the tab-separated data source. */
    private static String file = "resource\\gdp数据.txt";
    /** ZooKeeper quorum configuration key. */
    private static String name = "hbase.zookeeper.quorum";
    /** ZooKeeper quorum host. */
    private static String value = "192.168.157.201";
    /** Target table name. */
    private static byte[] tableName = Bytes.toBytes("bigdata6");
    /** Single column family holding all GDP columns. */
    private static byte[] family = Bytes.toBytes("gdp");

    /**
     * Opens a new HBase connection configured with the ZooKeeper quorum above.
     * Callers own the connection and must close it (preferably via
     * try-with-resources).
     *
     * @return a live {@link Connection}
     * @throws IOException if the connection cannot be established
     */
    public static Connection getConnection() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set(name, value); // point the client at the ZooKeeper quorum
        return ConnectionFactory.createConnection(conf);
    }

    /**
     * Creates the target table with the {@code gdp} column family.
     * If the table already exists it is disabled and dropped first, so the
     * method always leaves an empty, freshly created table behind.
     *
     * @throws Exception on any HBase admin failure
     */
    public static void createTable() throws Exception {
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
        htd.addFamily(new HColumnDescriptor(family));
        // try-with-resources guarantees the connection and admin are closed
        // even if a DDL call throws (the original leaked them on error)
        try (Connection conn = getConnection();
             Admin admin = conn.getAdmin()) {
            TableName tn = TableName.valueOf(tableName);
            if (admin.tableExists(tn)) {
                // a table must be disabled before it can be deleted
                admin.disableTable(tn);
                admin.deleteTable(tn);
            }
            admin.createTable(htd);
        }
    }

    /**
     * Bulk-inserts the parsed rows into the table. The first field of each
     * record becomes the row key; the remaining fields map to the columns
     * gj, dq, gdp and bfb of the {@code gdp} family.
     *
     * @param list parsed records, one {@code String[]} per input line
     * @throws Exception on any HBase write failure
     */
    public static void putData(ArrayList<String[]> list) throws Exception {
        ArrayList<Put> puts = new ArrayList<Put>();
        for (String[] line : list) {
            if (line.length < 5) {
                // short/malformed record: skip instead of throwing AIOOBE
                System.out.println("skipping malformed record with "
                        + line.length + " field(s)");
                continue;
            }
            Put put = new Put(Bytes.toBytes(line[0])); // field 0 = row key
            put.addColumn(family, Bytes.toBytes("gj"), Bytes.toBytes(line[1]));
            put.addColumn(family, Bytes.toBytes("dq"), Bytes.toBytes(line[2]));
            put.addColumn(family, Bytes.toBytes("gdp"), Bytes.toBytes(line[3]));
            // BUGFIX: original wrote line[1] (country) into "bfb" (percentage);
            // the percentage is the fifth field, line[4]
            put.addColumn(family, Bytes.toBytes("bfb"), Bytes.toBytes(line[4]));
            puts.add(put);
        }
        try (Connection conn = getConnection();
             Table table = conn.getTable(TableName.valueOf(tableName))) {
            table.put(puts); // single batched RPC round for all rows
        }
    }

    /**
     * Full-table scan: prints the row key and every cell
     * (family_qualifier:value) of each row to stdout.
     *
     * @throws IOException on any HBase read failure
     */
    public static void scanData() throws IOException {
        try (Connection conn = getConnection();
             Table table = conn.getTable(TableName.valueOf(tableName));
             ResultScanner rs = table.getScanner(new Scan())) {
            for (Result result : rs) {
                // each Result is one row: row key plus its cells
                System.out.println("rowkey:" + Bytes.toString(result.getRow()));
                List<Cell> cells = result.listCells();
                for (Cell cell : cells) {
                    // locals renamed so they no longer shadow the static
                    // fields `family` and `value`
                    String cf = Bytes.toString(CellUtil.cloneFamily(cell));
                    String qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
                    String cellValue = Bytes.toString(CellUtil.cloneValue(cell));
                    System.out.println("value:" + cf + "_" + qualifier + ":" + cellValue);
                }
            }
        }
    }

    /**
     * Reads the tab-separated source file into memory, echoing each field
     * to stdout as it is parsed.
     *
     * @return one {@code String[]} of fields per non-blank input line
     * @throws IOException if the file cannot be read
     */
    public static ArrayList<String[]> readData() throws IOException {
        ArrayList<String[]> list = new ArrayList<String[]>();
        try (Scanner in = new Scanner(new File(file))) {
            while (in.hasNext()) {
                String read = in.nextLine();
                // BUGFIX: original tested `!read.equals(null)`, which is always
                // true for a non-null string, so blank lines slipped through and
                // later produced a Put with an empty row key (runtime error).
                if (read != null && !read.isEmpty()) {
                    String[] line = read.split("\t");
                    for (String str : line) {
                        System.out.println(str + ",");
                    }
                    list.add(line);
                }
            }
        }
        return list;
    }

    /**
     * Entry point: rebuild the table, load the file, insert, then scan back.
     */
    public static void main(String[] args) throws Exception {
        createTable();
        ArrayList<String[]> list = readData();
        putData(list);
        scanData();
    }
}
标签:,Bytes,hadoop,org,apache,import,hbase 来源: https://www.cnblogs.com/modikasi/p/16689116.html