
Connecting to Hive over JDBC (complete code included)


1. Create a Maven project

1) Select the quickstart archetype
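If you prefer the command line to the IDE wizard, a roughly equivalent command is the following (the groupId and artifactId here are only placeholders; pick your own):

mvn archetype:generate -DgroupId=cn.kgc -DartifactId=hive-jdbc-demo -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false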

2) Import the dependencies below (your IDE can be set to check them for updates periodically)

<dependencies>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>${hive.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-auth</artifactId>
        <version>${hadoop.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>jdk.tools</artifactId>
                <groupId>jdk.tools</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>jdk.tools</artifactId>
                <groupId>jdk.tools</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${hadoop.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>jdk.tools</artifactId>
                <groupId>jdk.tools</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${hadoop.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>jdk.tools</artifactId>
                <groupId>jdk.tools</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-core</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
</dependencies>
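Note that the snippet above references ${hive.version} and ${hadoop.version} without defining them, so the build will fail as-is. Define them in a <properties> block; the versions below are only examples and must match the Hive and Hadoop versions on your cluster:

<properties>
    <hive.version>3.1.2</hive.version>
    <hadoop.version>3.1.3</hadoop.version>
</properties>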

2. Create the resource files

1) HiveResource.properties

driver=org.apache.hive.jdbc.HiveDriver
url=jdbc:hive2://<your-vm-ip>:10000/<your-hive-database>
username=root
password=<your-password>

2) log4j.properties (optional, for logging). Note that the logfile appender defined below only takes effect if you also add it to log4j.rootLogger (e.g. INFO, stdout, logfile):

log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/hadoop.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
Note

The database and tables referenced in the properties file must already exist in Hive on the virtual machine. You can verify them by connecting with Beeline:

beeline -u jdbc:hive2://<your-vm-ip>:10000 -n root
// opens a Hive client where you can inspect the databases and their tables

Or open the HDFS NameNode web UI in a browser: http://<your-vm-ip>:9870
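Both checks assume HiveServer2 is already listening on port 10000 on the VM. If the connection is refused, start it first; one common way, assuming hive is on the PATH, is:

hive --service hiveserver2 &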

3. The code

1) The BaseDao class

package cn.kgc.jdbc;

import java.io.FileReader;
import java.io.IOException;
import java.sql.*;
import java.util.Properties;

public class BaseDao {
    private static final Properties properties = new Properties();
    // The path is resolved against the working directory (not the classpath),
    // so the config/ folder must exist where the program is launched
    private static final String PATH = "config/HiveResource.properties";

    // Load connection settings and register the Hive JDBC driver once
    static {
        try {
            properties.load(new FileReader(PATH));
            Class.forName(properties.getProperty("driver"));
        } catch (IOException | ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(-1);
        }
    }

    // Open a new connection using the settings from the properties file
    private static Connection con() throws SQLException {
        return DriverManager.getConnection(
                properties.getProperty("url"),
                properties.getProperty("username"),
                properties.getProperty("password")
        );
    }

    // Binds positional parameters; JDBC parameter indexes start at 1
    private static PreparedStatement pst(Connection con, String sql, Object... objs) throws SQLException {
        PreparedStatement pst = con.prepareStatement(sql);
        if (null != objs && objs.length>0){
            for (int i = 0; i < objs.length; i++) {
                pst.setObject(i+1,objs[i]);
            }
        }
        return pst;
    }

    // Close resources in order, swallowing individual close failures
    private static void close(AutoCloseable... closes){
        for (AutoCloseable close : closes) {
            if (null != close){
                try {
                    close.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    // Execute an insert/update/delete; returns the affected row count, or -1 on failure
    public static int exeUpdate(String sql, Object... objs){
        Connection con = null;
        PreparedStatement pst = null;
        try {
            con = con();
            pst = pst(con,sql,objs);
            return pst.executeUpdate();
        } catch (SQLException e) {
            e.printStackTrace();
        }finally {
            close(pst,con);
        }
        return -1;
    }

    // Batch insert: cloSize is the number of columns per row,
    // batchSize is how many rows to buffer before each flush.
    // Returns the total number of rows affected, or -1 on error.
    public static int addBatch(String sql, int cloSize, int batchSize, Object... objs){
        Connection con = null;
        PreparedStatement pst = null;
        int affectedRows = 0;
        try {
            con = con();
            pst = con.prepareStatement(sql);
            int realSize = 0;
            for (int i = 0; i < objs.length; i += cloSize) {
                for (int j = i, size = 0; size < cloSize; size++, j++) {
                    pst.setObject(size + 1, objs[j]);
                }
                pst.addBatch();
                // Flush once a full batch of batchSize rows has accumulated
                if (++realSize % batchSize == 0){
                    affectedRows += pst.executeBatch().length;
                    pst.clearBatch();
                }
            }
            // Flush any leftover rows that did not fill a complete batch
            if (realSize % batchSize != 0){
                affectedRows += pst.executeBatch().length;
                pst.clearBatch();
            }
            return affectedRows;
        } catch (Exception e) {
            e.printStackTrace();
        }finally {
            close(pst,con);
        }
        return -1;
    }

    // Execute a query and print every row, tab-separated
    public static void exeQuery(String sql){
        Connection con = null;
        PreparedStatement pst = null;
        ResultSet rst = null;
        try {
            con = con();
            pst = con.prepareStatement(sql);
            rst = pst.executeQuery();
            int colSize = rst.getMetaData().getColumnCount();
            while (rst.next()){
                for (int i = 1; i <= colSize ; i++) {
                    // String.valueOf prints "null" instead of throwing an NPE on NULL columns
                    System.out.print(String.valueOf(rst.getObject(i)));
                    System.out.print("\t");
                }
                System.out.println();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }finally {
            close(rst,pst,con);
        }
    }
}

2) The Test class (usage is essentially the same as JDBC against MySQL; note, though, that in Hive UPDATE and DELETE only work on transactional (ACID) tables)

package cn.kgc;


import cn.kgc.jdbc.BaseDao;

public class Test {
    public static void main(String[] args){
        // All BaseDao methods are static, so no instance is needed
        // Query
        /*String sql = "select * from sqp_testsqoop_par";
        BaseDao.exeQuery(sql);*/

        // Single-row insert
        /*String sql = "insert into studentinfo(stuId,stuName,stuAge,StuGender,fkClassId) values(?,?,?,?,?)";
        BaseDao.exeUpdate(sql,30,"戴敏","23","男",1);*/

        // Batch insert: 5 columns per row, flush every 2 rows
        /*String sql = "insert into studentinfo(stuId,stuName,stuAge,StuGender,fkClassId) values(?,?,?,?,?)";
        Object[] arr = {12,"戴敏","24","男",1,13,"张","25","男",1};
        BaseDao.addBatch(sql,5,2,arr);*/

        // Delete (by exact id; LIKE makes no sense for a numeric key)
        /*String sql = "delete from studentinfo where stuId = ?";
        BaseDao.exeUpdate(sql,30);*/

        // Update
        String sql = "update studentinfo set stuName= ?,StuGender = ? where stuId = 31";
        BaseDao.exeUpdate(sql,"戴敏","男");

    }
}
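The insert/update/delete examples assume a studentinfo table already exists in Hive; the original post never shows its schema. A hypothetical DDL inferred from the insert statement could look like the following (declared transactional because Hive's UPDATE and DELETE require ACID tables; older Hive versions may also require the bucketing clause shown here):

create table studentinfo (
    stuId      int,
    stuName    string,
    stuAge     string,
    StuGender  string,
    fkClassId  int
)
clustered by (stuId) into 2 buckets
stored as orc
tblproperties ('transactional'='true');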


Source: https://blog.csdn.net/daimin1/article/details/119062550