Connecting to HiveServer2 from Java via JDBC


1. Start hiveserver2 in a terminal
#hiveserver2

2. Connect to Hive with beeline
Open another terminal and enter the following command (xavierdb must be an existing database):
#beeline -u jdbc:hive2://localhost:10000/xavierdb -n hive -p hive

3. Add the Maven dependencies

        <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc -->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>1.1.0</version>
        </dependency>

        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.9</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.6.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.6.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-metastore -->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-metastore</artifactId>
            <version>1.1.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-exec -->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-exec</artifactId>
            <version>1.1.0</version>
        </dependency>

An error that came up: Error: Could not open client transport with JDBC Uri: jdbc:hive2://localhost:10000/default

Solution: checking showed that the driver version reported by beeline (Driver: Hive JDBC (version 1.1.0-cdh5.16.1)) was lower than the driver version in the Maven dependencies; aligning the Maven version to 1.1.0 resolved the problem.
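If it is unclear which driver version is actually on the classpath, the standard JDBC DatabaseMetaData API can report it (a minimal sketch, reusing the url and the hive/hive credentials from the test class below):

    // Print the client driver and server versions to spot a mismatch
    Connection con = DriverManager.getConnection(url, "hive", "hive");
    DatabaseMetaData meta = con.getMetaData();
    System.out.println("Driver: " + meta.getDriverName() + " (version " + meta.getDriverVersion() + ")");
    System.out.println("Server: " + meta.getDatabaseProductName() + " " + meta.getDatabaseProductVersion());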

Java API test:

Note: the url here must be the same url used with beeline.

package TestOption;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.sql.*;

/**
 * @Author: Xavier
 * @Date: 2019-02-18 11:43
 **/


public class HiveOption {

    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive2://yourhost:10000/yourdatabase";

    private static Connection con = null;
    private static Statement state = null;
    private static ResultSet res = null;

    // Load the driver and create the connection
    @Before
    public void init() throws ClassNotFoundException, SQLException {
        Class.forName(driverName);
        con = DriverManager.getConnection(url, "hive", "hive");
        state = con.createStatement();
    }

    // Create a database
    @Test
    public void createDb() throws SQLException {
        state.execute("create database xavierdb1");
    }

    // List all databases
    @Test
    public void showDb() throws SQLException {
        res = state.executeQuery("show databases");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    // Drop a database
    @Test
    public void dropDb() throws SQLException {
        state.execute("drop database if exists xavierdb1");
    }


    /*
     *
     * Basic operations on managed (internal) tables
     *
     */

    // Create a table
    @Test
    public void createTab() throws SQLException {
        state.execute("create table if not exists student ( " +
                "name string , " +
                "age int , " +
                "agent string ," +
                "address struct<street:STRING,city:STRING>) " +
                "row format delimited " +
                "fields terminated by ',' " +           // separator between fields
                "collection items terminated by ':'" +  // separator between the items of one field
                "lines terminated by '\n' ");           // line separator
    }

    // List all tables
    @Test
    public void showTab() throws SQLException {
        res = state.executeQuery("show tables");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    // Describe a table
    @Test
    public void descTab() throws SQLException {
        res = state.executeQuery("desc emp");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    // Load data
    @Test
    public void loadData() throws SQLException {
        String infile = " '/root/studentData' ";
        state.execute("load data local inpath " + infile + "overwrite into table student");
    }

    // Query data
    @Test
    public void selectTab() throws SQLException {
        res = state.executeQuery("select * from student1");
        while (res.next()) {
            System.out.println(
                    res.getString(1) + "-" +
                    res.getString(2) + "-" +
                    res.getString(3) + "-" +
                    res.getString(4));
        }
    }

    // Aggregate query (runs a MapReduce job, which is relatively expensive)
    @Test
    public void countData() throws SQLException {
        res = state.executeQuery("select count(1) from student");
        while (res.next()) {
            System.out.println(res.getInt(1));
        }
    }

    // Drop a table
    @Test
    public void dropTab() throws SQLException {
        state.execute("drop table emp");
    }


    /*
     * Basic operations on external tables
     *
     * After an external table is dropped, its data stays on HDFS;
     * if an external table is recreated at the same path, that data is still there.
     *
     */

    // Create an external table
    @Test
    public void createExTab() throws SQLException {
        state.execute("create external table if not exists student1 ( " +
                "name string , " +
                "age int , " +
                "agent string ," +
                "address struct<street:STRING,city:STRING>) " +
                "row format delimited " +
                "fields terminated by ',' " +
                "collection items terminated by ':'" +
                "lines terminated by '\n' " +
                "stored as textfile " +
                "location '/testData/hive/student1' ");  // without location, the path from hive.metastore.warehouse.dir is used
    }

    // Copy the schema of an existing table; the data is not copied
    //
    // Create a table carrying the data:
    //   create table student1 as select * from student
    // Create a table carrying only the schema:
    //   create table student1 like student
    //
    @Test
    public void copyExTab() throws SQLException {
        state.execute("create external table if not exists student2 " +
                "like xavierdb.student " +
                "location '/testData/hive/student1'");
    }


    /*
     * Partitioned tables
     *
     * Partitions must be declared when the table is defined.
     *
     */

    // Static partitions


    // Create a partitioned table
    @Test
    public void createParTab() throws SQLException {
        state.execute("create table if not exists emp (" +
                "name string ," +
                "salary int ," +
                "subordinate array<string> ," +
                "deductions map<string,float> ," +
                "address struct<street:string,city:string>) " +
                "partitioned by (city string,street string) " +
                "row format delimited " +
                "fields terminated by '\t' " +
                "collection items terminated by ',' " +
                "map keys terminated by ':' " +
                "lines terminated by '\n' " +
                "stored as textfile");
    }

    // Add a partition
    @Test
    public void addPartition() throws SQLException {
        state.execute("alter table emp add partition(city='shanghai',street='jinkelu') ");
    }

    // Show partition info
    @Test
    public void showPartition() throws SQLException {
        // res = state.executeQuery("select * from emp");
        res = state.executeQuery("show partitions emp");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    // Load data into a partition
    @Test
    public void loadParData() throws SQLException {
        String filepath = " '/root/emp' ";
        state.execute("load data local inpath " + filepath + " overwrite into table emp partition (city='shanghai',street='jinkelu')");
    }

    // Drop a partition
    @Test
    public void dropPartition() throws SQLException {
        state.execute("alter table emp drop partition (city='shanghai',street='jinkelu') ");
        /*
         * 1. Pack a partition into a HAR archive:
         *    alter table emp archive partition (city='shanghai',street='jinkelu')
         * 2. Restore a HAR archive back into a normal partition:
         *    alter table emp unarchive partition (city='shanghai',street='jinkelu')
         * 3. Protect a partition from being dropped:
         *    alter table emp partition (city='shanghai',street='jinkelu') enable no_drop
         * 4. Protect a partition from being queried:
         *    alter table emp partition (city='shanghai',street='jinkelu') enable offline
         * 5. Re-allow dropping and querying:
         *    alter table emp partition (city='shanghai',street='jinkelu') disable no_drop
         *    alter table emp partition (city='shanghai',street='jinkelu') disable offline
         */
    }

    // External tables can be partitioned the same way


    // Dynamic partitions
    //
    // When one statement must insert data into several partitions, dynamic
    // partitioning distributes the queried rows across partitions automatically.
    // The difference from static partitioning is that no partition directory is
    // specified; Hive picks the target partition from the actual data.
    // The settings below can also be issued over JDBC (see the sketch that follows):
    //
    // set hive.exec.dynamic.partition=true;            -- enable dynamic partitioning
    // set hive.exec.dynamic.partition.mode=nonstrict;  -- partition mode, default strict
    // set hive.exec.max.dynamic.partitions=1000;       -- max number of dynamic partitions, default 1000
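    // A minimal sketch (an addition, not part of the original test class) that
    // issues the settings above over JDBC; the assumption is that session-level
    // "set" commands apply to later statements on the same connection.
    @Test
    public void enableDynamicPartition() throws SQLException {
        state.execute("set hive.exec.dynamic.partition=true");
        state.execute("set hive.exec.dynamic.partition.mode=nonstrict");
        state.execute("set hive.exec.max.dynamic.partitions=1000");
    }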

    // Create a partitioned table for the dynamic-partition insert
    @Test
    public void createParTab1() throws SQLException {
        state.execute("create table if not exists emp1 (" +
                "name string ," +
                "salary int ," +
                "subordinate array<string> ," +
                "deductions map<string,float> ," +
                "address struct<street:string,city:string>) " +
                "partitioned by (city string,street string) " +
                "row format delimited " +
                "fields terminated by '\t' " +
                "collection items terminated by ',' " +
                "map keys terminated by ':' " +
                "lines terminated by '\n' " +
                "stored as textfile");
    }

    // Let the queried data decide the partition
    @Test
    public void loadPartitionData() throws SQLException {
        state.execute("insert overwrite table emp1 partition (city='shanghai',street) " +
                "select name,salary,subordinate,deductions,address,address.street from emp");
    }


    // Release resources
    @After
    public void destroy() throws SQLException {
        if (res != null) res.close();
        if (state != null) state.close();
        if (con != null) con.close();
    }
}

***How to modify Hive configuration when connecting to HiveServer2***

1) Add the settings directly in the URL


...
url = "jdbc:hive2://yourhost:10000/yourdatabase?mapreduce.job.queuename=root.hive-server2;hive.execution.enginer=spark";
Connection con = DriverManager.getConnection(url,  "hive", "hive");
...

Separate multiple conf settings with ";"; separate the conf section from the url's variable section with "#".
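For example, a full URL combining conf settings and a Hive variable might look like this (an illustrative sketch following the rule above; the queue name and the "mydate" variable are made-up values):

String url = "jdbc:hive2://yourhost:10000/yourdatabase"
        + "?mapreduce.job.queuename=root.hive-server2;hive.execution.engine=spark"
        + "#mydate=20190218";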


2) Execute a set statement directly with state.execute


...
state.execute("set hive.execution.engine=spark");
...


3) Set them through connection properties

Class.forName("org.apache.hive.jdbc.HiveDriver");
Properties props = new Properties();
props.setProperty("user", "hive");
props.setProperty("password", "hive");
// a hive conf entry passed through the connection properties
props.setProperty("hive.execution.engine", "spark");
String url = "jdbc:hive2://yourhost:10000/yourdatabase";
Connection conn = DriverManager.getConnection(url, props);
HiveStatement stat = (HiveStatement) conn.createStatement();
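
Whichever of the three approaches is used, the value actually in effect can be verified in the same session (a minimal sketch; the assumption is that querying "set <key>" returns a single "key=value" row):

ResultSet rs = stat.executeQuery("set hive.execution.engine");
if (rs.next()) {
    System.out.println(rs.getString(1));  // expected: hive.execution.engine=spark
}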
