
Connecting to Kyuubi over JDBC and submitting Flink SQL (with Kerberos authentication)

On a cluster with Kerberos authentication enabled, connecting to Kyuubi over JDBC and submitting Flink SQL to a Flink cluster is somewhat tricky. I went through quite a few articles online and followed them, but none of them worked; in the end the approach below got the job submitted and running successfully.

Note: before running the code, make sure the machine that submits the job is time-synchronized with the target servers (Kerberos is sensitive to clock skew), otherwise the run may fail with the following error:

Exception in thread "main" java.sql.SQLException: Could not open client transport with JDBC Uri: jdbc:hive2://node01:10009/;principal=hive/node01@HADOOP.COM: GSS initiate failed
	at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:256)
	at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:107)
	at java.sql.DriverManager.getConnection(DriverManager.java:664)
	at java.sql.DriverManager.getConnection(DriverManager.java:270)
	at com.bitzh.kyuubiTest.main(kyuubiTest.java:43)
Caused by: org.apache.thrift.transport.TTransportException: GSS initiate failed
	at org.apache.thrift.transport.TSaslTransport.sendAndThrowMessage(TSaslTransport.java:232)
	at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:316)
	at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
	at org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:51)
	at org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:48)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1962)
	at org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport.open(TUGIAssumingTransport.java:48)
	at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:343)
	at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:228)
	... 4 more
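
If the error persists after syncing the clocks, the JVM's built-in Kerberos tracing usually pinpoints the cause (clock skew, unreachable KDC, wrong principal). This is a small sketch of my own, not part of the original setup; sun.security.krb5.debug is a standard JVM system property:

// Hedged debugging sketch: enable verbose Kerberos tracing before any UGI/JDBC call.
public class Krb5DebugExample {
    public static void main(String[] args) {
        // Prints every AS/TGS exchange with the KDC, so clock-skew and principal
        // problems show up directly in the console output.
        System.setProperty("sun.security.krb5.debug", "true");
        // ...the Kerberos login and JDBC code from the example below would follow here.
    }
}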

The connection method is as follows:

Maven dependency

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>3.1.2</version>
</dependency>
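
The connection code below also uses UserGroupInformation from hadoop-common. Depending on how hive-jdbc's transitive dependencies resolve in your build, you may need to declare it explicitly; the version here is an assumption and should match your cluster's Hadoop version:

<!-- Only if hadoop-common is not already on the classpath transitively; match your Hadoop version. -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>3.1.1</version>
</dependency>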

Connection code

package com.bitzh;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

import java.io.IOException;
import java.sql.*;


public class kyuubiTest {

    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    private static String kyuubiJdbcUrl = "jdbc:hive2://node01:10009/;principal=hive/node01@HADOOP.COM";

    public static void main(String[] args) throws ClassNotFoundException, SQLException, IOException {
        // Point the JVM at the cluster's krb5.conf so it can locate the KDC.
        System.setProperty("java.security.krb5.conf", "C:\\Users\\Administrator\\Desktop\\krb5.conf");

        // Switch the Hadoop client libraries to Kerberos authentication.
        Configuration configuration = new Configuration();
        configuration.setBoolean("hadoop.security.authorization", true);
        configuration.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(configuration);

        try {
            // Re-login if a keytab-based login already exists, otherwise log in with the keytab.
            if (UserGroupInformation.isLoginKeytabBased()) {
                UserGroupInformation.getLoginUser().reloginFromKeytab();
            } else {
                UserGroupInformation.loginUserFromKeytab("hive/node01@HADOOP.COM",
                        "C:\\Users\\Administrator\\Desktop\\hive.keytab");
            }
            System.out.println("ticketCache=====>" + UserGroupInformation.isLoginTicketBased());
        } catch (IOException e) {
            e.printStackTrace();
        }

        String sql = "select '123';";
        ResultSet res = null;

        // Load the Hive JDBC driver and open a connection to the Kyuubi server.
        Class.forName(driverName);
        Connection conn = DriverManager.getConnection(kyuubiJdbcUrl);
        Statement st = conn.createStatement();

        // Execute each ';'-separated statement and print the first column of every row.
        String[] splits = sql.split(";");
        for (String split : splits) {
            res = st.executeQuery(split);
            while (res.next()) {
                System.out.println(res.getString(1));
            }
        }

        if (res != null) {
            res.close();
        }
        st.close();
        conn.close();
    }
}
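
The select '123' above only proves that the Kerberos handshake and the JDBC connection work. To have Kyuubi actually run statements on a Flink engine, the session has to be started with the Flink engine type (kyuubi.engine.type=FLINK_SQL), set either in kyuubi-defaults.conf or per session. The sketch below is my own illustration under those assumptions: the URL suffix for passing the config and the bounded datagen table are examples, not part of the original setup.

package com.bitzh;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

import java.sql.*;

// Hedged sketch: submitting Flink SQL through the same Kyuubi JDBC connection, assuming the
// Kyuubi server can launch a Flink engine (kyuubi.engine.type=FLINK_SQL).
public class flinkSqlTest {

    // Passing the engine type in the URL is an assumption; setting it in kyuubi-defaults.conf works as well.
    private static String kyuubiJdbcUrl =
            "jdbc:hive2://node01:10009/;principal=hive/node01@HADOOP.COM#kyuubi.engine.type=FLINK_SQL";

    public static void main(String[] args) throws Exception {
        // Same Kerberos login as in kyuubiTest above.
        System.setProperty("java.security.krb5.conf", "C:\\Users\\Administrator\\Desktop\\krb5.conf");
        Configuration configuration = new Configuration();
        configuration.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(configuration);
        UserGroupInformation.loginUserFromKeytab("hive/node01@HADOOP.COM",
                "C:\\Users\\Administrator\\Desktop\\hive.keytab");

        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(kyuubiJdbcUrl);
             Statement st = conn.createStatement()) {

            // A bounded datagen source so the SELECT finishes instead of streaming forever.
            st.execute("CREATE TABLE IF NOT EXISTS demo_source (id INT, name STRING) " +
                    "WITH ('connector' = 'datagen', 'number-of-rows' = '10')");

            try (ResultSet rs = st.executeQuery("SELECT id, name FROM demo_source")) {
                while (rs.next()) {
                    System.out.println(rs.getInt(1) + "\t" + rs.getString(2));
                }
            }
        }
    }
}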