Hadoop version: hadoop-2.9.2.tar.gz, Hive version: apache-hive-2.3.6-src.tar.gz. For installing Hive, see: Installing Hive on CentOS.
Make sure Hive has started hiveserver2 correctly.
pom.xml dependencies:
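To confirm that HiveServer2 is actually reachable before running the tests, a quick JDBC probe along the following lines can help. This is a minimal sketch, not part of the original article: the host, port, user, and password are the same values used in the test class further down and are assumptions about your environment; it connects to the default database and simply lists the databases it can see.

package com.zhi.test.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

/**
 * Minimal HiveServer2 connectivity probe (sketch; adjust host, port, and credentials to your environment).
 */
public class HiveServer2Probe {

    public static void main(String[] args) throws Exception {
        // Register the Hive JDBC driver provided by the hive-jdbc dependency below.
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://192.168.107.143:10000/default", "root", "abc123");
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("show databases")) {
            while (rs.next()) {
                System.out.println("database: " + rs.getString(1));
            }
        }
    }
}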
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.zhi.test</groupId>
    <artifactId>hive-test</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>
    <name>hive-test</name>
    <url>http://maven.apache.org</url>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <!-- Logger -->
        <log4j2.version>2.12.1</log4j2.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.3.6</version>
            <exclusions>
                <exclusion>
                    <groupId>log4j</groupId>
                    <artifactId>log4j</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.17</version>
        </dependency>
        <!-- Logger (Log4j 2) -->
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>${log4j2.version}</version>
        </dependency>
        <!-- Log4j 1.x API bridge -->
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-1.2-api</artifactId>
            <version>${log4j2.version}</version>
        </dependency>
        <!-- SLF4J bridge -->
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <version>${log4j2.version}</version>
        </dependency>
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.8</version>
            <scope>system</scope>
            <systemPath>D:\Program Files\Java\jdk1.8.0_191\lib\tools.jar</systemPath>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <version>5.5.2</version>
        </dependency>
    </dependencies>
</project>
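Because the pom excludes Hive's Log4j 1.x and SLF4J bindings and routes logging through the Log4j 2 bridge artifacts, a Log4j 2 configuration file should be on the classpath (for example src/main/resources/log4j2.xml). The file below is a minimal sketch and an assumption, not taken from the original setup; its pattern only roughly reproduces the "timestamp [thread] LEVEL logger.method line - message" layout seen in the execution output at the end.

<?xml version="1.0" encoding="UTF-8"?>
<!-- Minimal Log4j 2 configuration sketch; place it on the classpath, e.g. src/main/resources/log4j2.xml. -->
<Configuration status="WARN">
    <Appenders>
        <Console name="Console" target="SYSTEM_OUT">
            <!-- Roughly matches the "timestamp [thread] LEVEL logger.method line - message" output shown below. -->
            <PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5level %logger.%M %L - %msg%n"/>
        </Console>
    </Appenders>
    <Loggers>
        <Root level="info">
            <AppenderRef ref="Console"/>
        </Root>
    </Loggers>
</Configuration>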
Java code:
package com.zhi.test.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.TestInstance.Lifecycle;
import org.junit.jupiter.api.TestMethodOrder;

/**
 * Hive operations via JDBC.
 *
 * @author zhi
 * @since 2019-09-11 11:55:00
 */
@TestInstance(Lifecycle.PER_CLASS)
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class HiveTest {
    private final Logger logger = LogManager.getLogger(this.getClass());
    private static Connection connection = null;

    @BeforeAll
    public void init() throws Exception {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            connection = DriverManager.getConnection("jdbc:hive2://192.168.107.143:10000/test_db", "root", "abc123");
        } catch (SQLException | ClassNotFoundException e) {
            logger.error("Failed to create Hive connection", e);
            throw e;
        }
    }

    @AfterAll
    public void destroy() throws Exception {
        if (connection != null) {
            connection.close();
        }
    }

    /**
     * Create a database.
     */
    @Order(1)
    @Test
    public void createDatabase() {
        String sql = "create database test_db";
        logger.info("Creating database, SQL: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Database created");
        } catch (SQLException e) {
            logger.error("Error creating database", e);
        }
    }

    /**
     * List databases.
     */
    @Order(2)
    @Test
    public void showDatabases() {
        String sql = "show databases";
        logger.info("Listing databases, SQL: {}", sql);
        try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                logger.info("Found database: {}", rs.getString(1));
            }
        } catch (SQLException e) {
            logger.error("Error listing databases", e);
        }
    }

    /**
     * Create a table.
     */
    @Order(3)
    @Test
    public void createTable() {
        String sql = "create table user_tb(id int, name string) row format delimited fields terminated by ','";
        logger.info("Creating table, SQL: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Table created");
        } catch (SQLException e) {
            logger.error("Error creating table", e);
        }
    }

    /**
     * List all tables.
     */
    @Order(3)
    @Test
    public void showTables() {
        String sql = "show tables";
        logger.info("Listing tables, SQL: {}", sql);
        try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                logger.info("Found table: {}", rs.getString(1));
            }
        } catch (SQLException e) {
            logger.error("Error listing tables", e);
        }
    }

    /**
     * Describe the table structure.
     */
    @Order(4)
    @Test
    public void descTable() {
        String sql = "desc user_tb";
        logger.info("Describing table, SQL: {}", sql);
        try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                logger.info("Column: {}, type: {}", rs.getString(1), rs.getString(2));
            }
        } catch (SQLException e) {
            logger.error("Error describing table", e);
        }
    }

    /**
     * Load data. data.txt contains rows in the format:<br>
     * 1,張三<br>
     * 2,李四
     */
    @Order(5)
    @Test
    public void loadData() {
        String sql = "load data local inpath '/home/data.txt' overwrite into table user_tb";
        logger.info("Loading data, SQL: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Data loaded");
        } catch (SQLException e) {
            logger.error("Error loading data", e);
        }
    }

    /**
     * Query data.
     */
    @Order(6)
    @Test
    public void selectData() {
        String sql = "select * from user_tb";
        logger.info("Querying data, SQL: {}", sql);
        try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                logger.info("id={},name={}", rs.getInt("id"), rs.getString("name"));
            }
        } catch (SQLException e) {
            logger.error("Error querying data", e);
        }
    }

    /**
     * Count rows.
     */
    @Order(7)
    @Test
    public void count() {
        String sql = "select count(1) from user_tb";
        logger.info("Counting rows, SQL: {}", sql);
        try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                logger.info("count={}", rs.getInt(1));
            }
        } catch (SQLException e) {
            logger.error("Error counting rows", e);
        }
    }

    /**
     * Drop the table.
     */
    @Order(8)
    @Test
    public void dropTable() {
        String sql = "drop table if exists user_tb";
        logger.info("Dropping table, SQL: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Table dropped");
        } catch (SQLException e) {
            logger.error("Error dropping table", e);
        }
    }

    /**
     * Drop the database.
     */
    @Order(9)
    @Test
    public void dropDatabase() {
        String sql = "drop database if exists test_db";
        logger.info("Dropping database, SQL: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Database dropped");
        } catch (SQLException e) {
            logger.error("Error dropping database", e);
        }
    }
}
Execution output:
2019-09-11 22:05:52.324 [main] INFO org.apache.hive.jdbc.Utils.parseURL 325 - Supplied authorities: 192.168.107.143:10000
2019-09-11 22:05:52.334 [main] INFO org.apache.hive.jdbc.Utils.parseURL 444 - Resolved authority: 192.168.107.143:10000
2019-09-11 22:05:52.772 [main] INFO com.zhi.test.hive.HiveTest.createDatabase 58 - Creating database, SQL: create database test_db
2019-09-11 22:05:53.098 [main] INFO com.zhi.test.hive.HiveTest.createDatabase 61 - Database created
2019-09-11 22:05:53.131 [main] INFO com.zhi.test.hive.HiveTest.showDatabases 74 - Listing databases, SQL: show databases
2019-09-11 22:05:53.332 [main] INFO com.zhi.test.hive.HiveTest.showDatabases 77 - Found database: default
2019-09-11 22:05:53.332 [main] INFO com.zhi.test.hive.HiveTest.showDatabases 77 - Found database: test_db
2019-09-11 22:05:53.347 [main] INFO com.zhi.test.hive.HiveTest.createTable 91 - Creating table, SQL: create table user_tb(id int, name string) row format delimited fields terminated by ','
2019-09-11 22:05:53.623 [main] INFO com.zhi.test.hive.HiveTest.createTable 94 - Table created
2019-09-11 22:05:53.644 [main] INFO com.zhi.test.hive.HiveTest.showTables 107 - Listing tables, SQL: show tables
2019-09-11 22:05:53.784 [main] INFO com.zhi.test.hive.HiveTest.showTables 110 - Found table: user_tb
2019-09-11 22:05:53.784 [main] INFO com.zhi.test.hive.HiveTest.showTables 110 - Found table: usertest_tb
2019-09-11 22:05:53.806 [main] INFO com.zhi.test.hive.HiveTest.descTable 124 - Describing table, SQL: desc user_tb
2019-09-11 22:05:53.971 [main] INFO com.zhi.test.hive.HiveTest.descTable 127 - Column: id, type: int
2019-09-11 22:05:53.971 [main] INFO com.zhi.test.hive.HiveTest.descTable 127 - Column: name, type: string
2019-09-11 22:05:53.987 [main] INFO com.zhi.test.hive.HiveTest.loadData 143 - Loading data, SQL: load data local inpath '/home/data.txt' overwrite into table user_tb
2019-09-11 22:05:55.106 [main] INFO com.zhi.test.hive.HiveTest.loadData 146 - Data loaded
2019-09-11 22:05:55.119 [main] INFO com.zhi.test.hive.HiveTest.selectData 159 - Querying data, SQL: select * from user_tb
2019-09-11 22:05:55.870 [main] INFO com.zhi.test.hive.HiveTest.selectData 162 - id=1,name=張三
2019-09-11 22:05:55.871 [main] INFO com.zhi.test.hive.HiveTest.selectData 162 - id=2,name=李四
2019-09-11 22:05:55.890 [main] INFO com.zhi.test.hive.HiveTest.count 176 - Counting rows, SQL: select count(1) from user_tb
2019-09-11 22:05:57.952 [main] INFO com.zhi.test.hive.HiveTest.count 179 - count=2
2019-09-11 22:05:57.983 [main] INFO com.zhi.test.hive.HiveTest.dropTable 193 - Dropping table, SQL: drop table if exists user_tb
2019-09-11 22:05:58.256 [main] INFO com.zhi.test.hive.HiveTest.dropTable 196 - Table dropped
2019-09-11 22:05:58.268 [main] INFO com.zhi.test.hive.HiveTest.dropDatabase 209 - Dropping database, SQL: drop database if exists test_db
2019-09-11 22:05:58.494 [main] INFO com.zhi.test.hive.HiveTest.dropDatabase 212 - Database dropped
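The test class above issues everything through plain Statement objects. hive-jdbc also supports PreparedStatement for simple parameterized queries. The standalone sketch below (an illustration, not part of the original article; it reuses the same hypothetical host, credentials, and user_tb table) shows the idea.

package com.zhi.test.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

/**
 * Sketch: parameterized query against Hive via PreparedStatement
 * (host, port, credentials, and table are assumptions matching the examples above).
 */
public class HiveQuerySketch {

    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://192.168.107.143:10000/test_db", "root", "abc123");
             // The '?' placeholder is bound below before the query is sent to HiveServer2.
             PreparedStatement ps = conn.prepareStatement("select name from user_tb where id = ?")) {
            ps.setInt(1, 1);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    System.out.println("name=" + rs.getString(1));
                }
            }
        }
    }
}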