Accessing Spark JDBC and Hive with Kerberos Authentication


Code for Accessing Hive

Our big data platform is being upgraded to Kerberos authentication. Below is the code snippet for connecting to Hive over JDBC, recorded here for future reference.

import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;

import org.apache.hadoop.security.UserGroupInformation;

public Connection getConnection() throws IOException, SQLException, ClassNotFoundException {
    Class.forName("org.apache.hive.jdbc.HiveDriver");

    Properties p = new Properties();
    InputStream in = HiveUtil.class.getResourceAsStream("/config/hive.properties");
    p.load(in);
    Connection conn = null;
    // Check whether the Kerberos switch is turned on
    if ("on".equals(p.getProperty("kerberos.authen.function"))) {
        String hiveUrl = p.getProperty("hiveUrl");
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        // The next two lines are needed for local testing; in production these
        // settings already come from environment variables, so keep them commented out.
        // System.setProperty("java.security.auth.login.config", p.getProperty("jassPath"));
        // System.setProperty("java.security.krb5.conf", p.getProperty("kbsConfig"));
        conf.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(p.getProperty("kbsUser"), p.getProperty("kbsKeyTab"));
        conn = DriverManager.getConnection(hiveUrl);
    } else {
        String hiveUrl = p.getProperty("hiveUrl");
        String username = p.getProperty("username");
        String password = p.getProperty("password");
        if ("".equals(username.trim())) {
            conn = DriverManager.getConnection(hiveUrl);
        } else {
            conn = DriverManager.getConnection(hiveUrl, username, password);
        }
    }
    in.close();
    return conn;
}
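
A minimal usage sketch for the helper above, assuming getConnection() lives in the HiveUtil class the snippet references, that java.sql.Statement and java.sql.ResultSet are also imported, and that a table named test_table exists (the class placement and table name are assumptions, not from the original snippet):

public static void main(String[] args) throws Exception {
    HiveUtil util = new HiveUtil();
    try (Connection conn = util.getConnection();
         Statement stmt = conn.createStatement();
         // test_table is a placeholder table name
         ResultSet rs = stmt.executeQuery("SELECT * FROM test_table LIMIT 10")) {
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }
}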

Configuration file:
username=XXX
password=
kerberos.authen.function=on
hiveUrl=jdbc:hive2://XXX.com:10001/default;principal=hive/XXX@XX.COM
jassPath=./zk-jaas.conf
kbsConfig=./krb5.conf
kbsUser=hive/XXX@XX.COM
kbsKeyTab=./hive.keytab
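
The jassPath property points at the JAAS file used for local testing (the name zk-jaas.conf suggests a ZooKeeper client login entry). The original post does not show its contents; a rough sketch of a typical Krb5LoginModule keytab entry, reusing the principal and keytab names from the config above, might look like this:

Client {
  com.sun.security.auth.module.Krb5LoginModule required
  useKeyTab=true
  keyTab="./hive.keytab"
  principal="hive/XXX@XX.COM"
  useTicketCache=false
  storeKey=true;
};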


The following Maven dependencies are required:
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>0.14.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.6.0</version>
</dependency>
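
Before running any of this, it can help to confirm the keytab and krb5.conf work outside the JVM. With the standard MIT Kerberos client tools, reusing the principal and file names from the config above:

export KRB5_CONFIG=./krb5.conf
kinit -kt ./hive.keytab hive/XXX@XX.COM
klist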



Code for Accessing Spark SQL via JDBC
public static void testFun(String sql) {
    // ZooKeeper-based service discovery locates the Spark Thrift Server;
    // saslQop=auth-conf enables confidentiality protection on the SASL layer.
    //String url = "jdbc:fiber://fiberconfig=D:/gsl/fiber/src/fiber.xml";
    String url = "jdbc:hive2://xxx.xxx.xxx.xxx:port1,xxx.xxx.xxx.xxx:port1,xxx.xxx.xxx.xxx:port/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=sparkthriftserver;saslQop=auth-conf;auth=KERBEROS;principal=spark/hadoop.hadoop.com@HADOOP.COM;user.principal=XXX;";
    try {
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        System.setProperty("java.security.auth.login.config", "E:/hehe/hehe/src/jaas.conf");
        System.setProperty("java.security.krb5.conf", "D:/hehe/hehe/src/krb5.conf");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("hehe", "D:/hehe/hehe/src/user.keytab");
        Connection conn = DriverManager.getConnection(url);
        System.out.println(Thread.currentThread().getName() + " " + conn);
        Statement stmt = conn.createStatement();
        System.out.println(Thread.currentThread().getName() + " " + stmt);
        // Route execution to the Spark2x engine before running the query
        stmt.execute("set fiber.execution.engine = spark2x");
        long startTime = System.currentTimeMillis();
        System.out.println(startTime);
        System.out.println(Thread.currentThread().getName() + " " + sql);
        stmt.execute(sql);
        long endTime = System.currentTimeMillis();
        long useTime = endTime - startTime;
        System.out.println(Thread.currentThread().getName() + " elapsed (ms): " + useTime);
        if (conn != null)
            conn.close();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } catch (SQLException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
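
A hypothetical entry point for exercising testFun; the query and table name are placeholders rather than anything from the original post:

public static void main(String[] args) {
    // Placeholder query against an assumed table
    testFun("SELECT COUNT(*) FROM test_table");
}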



