IDEA連接hive


使用工具連接hive:

https://blog.csdn.net/weixin_44508906/article/details/91348665

配置hive-site.xml

在hive-site.xml中加入配置信息,ip需要修改

<property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
</property>
<property>
     <name>hive.server2.thrift.bind.host</name>
    <value>192.168.58.128</value>
</property>

配置core-site.xml文件

在core-site.xml中加入配置信息

<property>
    <name>hadoop.proxyuser.hadoop.hosts</name>
    <value>*</value>
</property>
<property>   
    <name>hadoop.proxyuser.hadoop.groups</name>
    <value>*</value>
</property>

 

啟動hadoop&hiveserver2

啟動hadoop集群

start-all.sh

啟動hiveserver2

cd /usr/local/hive
./bin/hiveserver2

hiveserver2 啟動後會在前台持續運行(當前終端會停住不返回),此時需要另開一個終端,

輸入

beeline

輸入

!connect jdbc:hive2://192.168.58.128:10000

連接hive,之后輸入在hive-site.xml文件中配置的username和password。

 

 之后進入http://192.168.58.128:10002可查看連接狀態

 

 


代碼連接:
先從虛擬機上 Hive 安裝目錄中找到所需的依賴 jar 包(一般位於 /usr/local/hive/lib 和 /usr/local/hive/jdbc 下,例如 hive-jdbc-*-standalone.jar、hadoop-common-*.jar 等,具體以你的版本為準)

 

把這三個jar包加到file->Project Structure->Modules->Dependencies

點擊ok

代碼:

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.sql.*;

/**
 * JDBC 操作 Hive(注:JDBC 訪問 Hive 前需要先啟動HiveServer2)
 */
/**
 * JDBC access to Hive.
 *
 * <p>NOTE: HiveServer2 must be running before executing these tests
 * ({@code ./bin/hiveserver2}), and the connection settings below must match
 * your cluster (host/port from hive-site.xml, plus the configured credentials).
 *
 * <p>Each JUnit 4 test runs against a fresh instance: {@link #init()} opens the
 * connection before every test and {@link #destory()} closes it afterwards.
 */
public class HiveJDBC {

    // Connection settings — adjust host/database/credentials for your environment.
    private static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";
    private static final String URL = "jdbc:hive2://192.168.58.128:10000/myhive";
    private static final String USER = "root";
    private static final String PASSWORD = "123456";

    // Per-test resources. Instance fields (not static): JUnit 4 creates a new
    // instance for every test method, so no state leaks between tests.
    private Connection conn = null;
    private Statement stmt = null;
    private ResultSet rs = null;

    /**
     * Loads the Hive JDBC driver and opens a connection plus a statement.
     * Runs before every test.
     *
     * @throws Exception if the driver class is missing or the connection fails
     */
    @Before
    public void init() throws Exception {
        // Explicit driver load kept for environments where JDBC 4 auto-discovery
        // does not pick up the Hive driver.
        Class.forName(DRIVER_NAME);
        conn = DriverManager.getConnection(URL, USER, PASSWORD);
        stmt = conn.createStatement();
    }

    /** Creates the database {@code hive_jdbc_test}. */
    @Test
    public void createDatabase() throws Exception {
        String sql = "create database hive_jdbc_test";
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    }

    /** Lists all databases visible to the connected user. */
    @Test
    public void showDatabases() throws Exception {
        String sql = "show databases";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    /** Creates the {@code emp} table with tab-delimited text storage. */
    @Test
    public void createTable() throws Exception {
        String sql = "create table emp(\n" +
                "empno int,\n" +
                "ename string,\n" +
                "job string,\n" +
                "mgr int,\n" +
                "hiredate string,\n" +
                "sal double,\n" +
                "comm double,\n" +
                "deptno int\n" +
                ")\n" +
                "row format delimited fields terminated by '\\t'";
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    }

    /** Lists all tables in the current database. */
    @Test
    public void showTables() throws Exception {
        String sql = "show tables";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    /** Prints the column names and types of the {@code emp} table. */
    @Test
    public void descTable() throws Exception {
        String sql = "desc emp";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1) + "\t" + rs.getString(2));
        }
    }

    /**
     * Loads a local data file into the {@code emp} table, replacing its contents.
     * The path is a fixed, trusted local file; Hive DDL/DML statements cannot be
     * parameterized with {@code ?} placeholders, hence the concatenation.
     */
    @Test
    public void loadData() throws Exception {
        String filePath = "/home/hadoop/data/emp.txt";
        String sql = "load data local inpath '" + filePath + "' overwrite into table emp";
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    }

    /** Queries every row of {@code emp} and prints id, name and job columns. */
    @Test
    public void selectData() throws Exception {
        String sql = "select * from emp";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        System.out.println("員工編號" + "\t" + "員工姓名" + "\t" + "工作崗位");
        while (rs.next()) {
            System.out.println(rs.getString("empno") + "\t\t" + rs.getString("ename") + "\t\t" + rs.getString("job"));
        }
    }

    /** Counts the rows of {@code emp}; this triggers a MapReduce job on the cluster. */
    @Test
    public void countData() throws Exception {
        String sql = "select count(1) from emp";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getInt(1));
        }
    }

    /** Drops the {@code hive_jdbc_test} database if it exists. */
    @Test
    public void dropDatabase() throws Exception {
        String sql = "drop database if exists hive_jdbc_test";
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    }

    /** Drops the {@code emp} table if it exists. (Name kept as-is for compatibility.) */
    @Test
    public void deopTable() throws Exception {
        String sql = "drop table if exists emp";
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    }

    /**
     * Releases JDBC resources after each test. Closes in reverse order of
     * creation; the nested try/finally chain guarantees that a failure while
     * closing one resource does not leak the remaining ones (the original
     * sequential version leaked stmt/conn if rs.close() threw).
     */
    @After
    public void destory() throws Exception {
        try {
            if (rs != null) {
                rs.close();
            }
        } finally {
            try {
                if (stmt != null) {
                    stmt.close();
                }
            } finally {
                if (conn != null) {
                    conn.close();
                }
            }
        }
    }
}

 

退出beeline

!quit

退出hiveserver2

ctrl + c

 


免責聲明!

本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。



 
粵ICP備18138465號   © 2018-2025 CODEPRJ.COM