Code for Accessing Hive
Our big-data platform is being upgraded to Kerberos authentication. Below is a JDBC snippet for connecting to Hive, recorded here for future reference.
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;

import org.apache.hadoop.security.UserGroupInformation;

public Connection getConnection() throws IOException, SQLException, ClassNotFoundException {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    Properties p = new Properties();
    // Load connection settings from the classpath; try-with-resources
    // closes the stream even if loading fails
    try (InputStream in = HiveUtil.class.getResourceAsStream("/config/hive.properties")) {
        if (in == null) {
            throw new IOException("hive.properties not found on the classpath");
        }
        p.load(in);
    }
    Connection conn;
    // Check whether the Kerberos switch is turned on
    if ("on".equals(p.getProperty("kerberos.authen.function"))) {
        String hiveUrl = p.getProperty("hiveUrl");
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        // Uncomment the next two lines for local testing; in production these
        // are already configured via environment variables, so keep them commented out
        // System.setProperty("java.security.auth.login.config", p.getProperty("jassPath"));
        // System.setProperty("java.security.krb5.conf", p.getProperty("kbsConfig"));
        conf.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(conf);
        // Log in from the keytab before opening the JDBC connection
        UserGroupInformation.loginUserFromKeytab(p.getProperty("kbsUser"), p.getProperty("kbsKeyTab"));
        conn = DriverManager.getConnection(hiveUrl);
    } else {
        String hiveUrl = p.getProperty("hiveUrl");
        String username = p.getProperty("username");
        String password = p.getProperty("password");
        // Guard against a missing or blank username to avoid a NullPointerException
        if (username == null || username.trim().isEmpty()) {
            conn = DriverManager.getConnection(hiveUrl);
        } else {
            conn = DriverManager.getConnection(hiveUrl, username, password);
        }
    }
    return conn;
}
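For reference, here is a minimal sketch of a caller, assuming HiveUtil is the enclosing class with a no-arg constructor and the usual java.sql imports; the query is just a placeholder:

// Hypothetical caller: obtain a connection, list the tables, and let
// try-with-resources close everything.
try (Connection conn = new HiveUtil().getConnection();
     Statement stmt = conn.createStatement();
     ResultSet rs = stmt.executeQuery("show tables")) {
    while (rs.next()) {
        System.out.println(rs.getString(1));
    }
}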
Configuration file (hive.properties):
username=XXX
password=
kerberos.authen.function=on
hiveUrl=jdbc:hive2://XXX.com:10001/default;principal=hive/XXX@XX.COM
jassPath=./zk-jaas.conf
kbsConfig=./krb5.conf
kbsUser=hive/XXX@XX.COM
kbsKeyTab=./hive.keytab
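The jassPath property points to a JAAS login configuration whose contents are not shown here. As a rough, assumed example of what a keytab-based zk-jaas.conf can look like (the entry name, principal, and keytab path all depend on your cluster, so treat every value below as a placeholder):

Client {
  com.sun.security.auth.module.Krb5LoginModule required
  useKeyTab=true
  keyTab="./hive.keytab"
  principal="hive/XXX@XX.COM"
  useTicketCache=false
  debug=false;
};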
The following Maven dependencies are required:
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>0.14.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.6.0</version>
</dependency>
Code for Accessing Spark SQL via JDBC
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

import org.apache.hadoop.security.UserGroupInformation;

public static void testFun(String sql) {
    // Alternative URL when going through the Fiber multi-engine driver:
    // String url = "jdbc:fiber://fiberconfig=D:/gsl/fiber/src/fiber.xml";
    String url = "jdbc:hive2://xxx.xxx.xxx.xxx:port1,xxx.xxx.xxx.xxx:port1,xxx.xxx.xxx.xxx:port/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=sparkthriftserver;saslQop=auth-conf;auth=KERBEROS;principal=spark/hadoop.hadoop.com@HADOOP.COM;user.principal=XXX;";
    try {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        // JAAS and krb5 configuration files used by the Kerberos login
        System.setProperty("java.security.auth.login.config", "E:/hehe/hehe/src/jaas.conf");
        System.setProperty("java.security.krb5.conf", "D:/hehe/hehe/src/krb5.conf");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("hehe", "D:/hehe/hehe/src/user.keytab");
        // try-with-resources guarantees the connection and statement are
        // closed even if the query throws
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement()) {
            System.out.println(Thread.currentThread().getName() + " " + conn);
            System.out.println(Thread.currentThread().getName() + " " + stmt);
            // Route this session's queries to the Spark2x engine (Fiber setting)
            stmt.execute("set fiber.execution.engine = spark2x");
            long startTime = System.currentTimeMillis();
            System.out.println(startTime);
            System.out.println(Thread.currentThread().getName() + " " + sql);
            stmt.execute(sql);
            long useTime = System.currentTimeMillis() - startTime;
            System.out.println(Thread.currentThread().getName() + " elapsed ms: " + useTime);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
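A minimal usage sketch follows; the main method and the SQL text are hypothetical placeholders, not part of the original program:

public static void main(String[] args) {
    // Hypothetical entry point: submit one query through the Spark Thrift Server
    testFun("select count(*) from default.test");
}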