mysql-->hive
0 參考文檔:
1.配置文件:mysql2hive.json
{
"job": {
"content": [
{
"reader": {
"name": "mysqlreader",
"parameter": {
"column": ["id","username"],
"connection": [
{
"jdbcUrl": ["jdbc:mysql://192.168.43.20:3306/test"],
"table": ["target_user"]
}
],
"password": "111111",
"username": "root",
"where": ""
}
},
"writer": {
"name": "hdfswriter",
"parameter": {
"column": [
{"name":"id","type":"string"},
{"name":"username","type":"string"}
],
"compress": "gzip",
"defaultFS": "hdfs://192.168.43.20:8020",
"fieldDelimiter": ",",
"fileName": "target_user",
"fileType": "text",
"path": "/user/hive/warehouse/mysql2hive",
"writeMode": "append"
}
}
}
],
"setting": {
"speed": {
"channel": "1"
}
}
}
}
2 創建mysql數據:

3 創建對應的hive表
create table mysql2hive(
id int,
username string
)row format delimited
fields terminated by ","
lines terminated by "\n";
4 執行命令:
python datax.py mysql2hive.json
5 hive查看數據

hive-->mysql
1配置文件 hdfs2mysql.json
{
"job": {
"content": [
{
"reader": {
"name": "hdfsreader",
"parameter": {
"column": [
{"index":0,"type":"long"},
{"index":1,"type":"string"}
],
"defaultFS": "hdfs://192.168.43.20:8020",
"encoding": "UTF-8",
"fieldDelimiter": ",",
"fileType": "text",
"path": "/user/hive/warehouse/mysql2hive"
}
},
"writer": {
"name": "mysqlwriter",
"parameter": {
"column": ["id","username"],
"connection": [
{
"jdbcUrl": "jdbc:mysql://192.168.43.20:3306/test",
"table": ["target_user"]
}
],
"password": "111111",
"preSql": [],
"session": [],
"username": "root",
"writeMode": "insert"
}
}
}
],
"setting": {
"speed": {
"channel": "1"
}
}
}
}
2 查看hive數據源:


3 准備好mysql 目標表

4 執行命令: python datax.py hdfs2mysql.json
5 查看mysql數據:

注意點: 1,如果mysql字段是數值型,而hive表字段是string,可以導入數據到hive表中;
但是如果hive表的字段是string,導入對應的mysql字段是int型,就會報錯。