Requires Spark 2.3 or above; I tested this myself on 2.2 and it does not work there.
Configuration
config("spark.sql.sources.partitionOverwriteMode","dynamic")
Notes
1. The saveAsTable method does not work here: it overwrites the whole table. Use insertInto instead; see the code below for details.
2. With insertInto, note that the DataFrame's column order must match the column order of the Hive table, otherwise the data will be written incorrectly! (See the sketch after this list.)
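A minimal sketch of aligning the column order before insertInto (not from the original post; targetCols and aligned are illustrative names, and the table name test.test_partition is taken from the example below):

import org.apache.spark.sql.functions.col

// insertInto matches columns by position, not by name, so reorder the
// DataFrame to follow the column order of the target Hive table first.
val targetCols = spark.table("test.test_partition").columns
val aligned = df1.select(targetCols.map(col): _*)
aligned.write.mode("overwrite").insertInto("test.test_partition")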
package com.dkl.blog.spark.hive
import org.apache.spark.sql.SparkSession
/**
 * Created by dongkelun on 2020/1/16 15:25
 *
 * Blog post: Spark 覆蓋寫Hive分區表,只覆蓋部分對應分區 (overwrite a Hive partition table, touching only the corresponding partitions)
 *
 * Requires Spark 2.3 or above
 */
object SparkHivePartitionOverwrite {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("SparkHivePartitionOverwrite")
      .master("local")
      .config("spark.sql.parquet.writeLegacyFormat", true)
      .config("spark.sql.sources.partitionOverwriteMode", "dynamic")
      .enableHiveSupport()
      .getOrCreate()

    import spark.sql

    val data = Array(("001", "張三", 21, "2018"), ("002", "李四", 18, "2017"))
    val df = spark.createDataFrame(data).toDF("id", "name", "age", "year")
    // Create a temporary view
    df.createOrReplaceTempView("temp_table")

    val tableName = "test_partition"

    // Switch to the target Hive database
    sql("use test")

    // 1. Create the partitioned table and write the initial data
    df.write.mode("overwrite").partitionBy("year").saveAsTable(tableName)
    spark.table(tableName).show()

    val data1 = Array(("011", "Sam", 21, "2018"))
    val df1 = spark.createDataFrame(data1).toDF("id", "name", "age", "year")
    // df1.write.mode("overwrite").partitionBy("year").saveAsTable(tableName) // does not work: overwrites the whole table
    // df1.write.mode("overwrite").format("Hive").partitionBy("year").saveAsTable(tableName) // does not work: overwrites the whole table

    // 2. Overwrite only the matching partition via insertInto
    df1.write.mode("overwrite").insertInto(tableName)
    spark.table(tableName).show()

    spark.stop
  }
}
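As a quick sanity check (not part of the original code), the remaining partitions can be listed with SHOW PARTITIONS before and after the overwrite:

// Hedged aside: list the partitions of the example table
sql("show partitions test_partition").show(false)
// With dynamic overwrite, year=2017 and year=2018 should both still exist afterwards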
Result

After step 1 (the initial saveAsTable write), the table contains both the year=2017 and year=2018 partitions:
+---+----+---+----+
| id|name|age|year|
+---+----+---+----+
|002| 李四| 18|2017|
|001| 張三| 21|2018|
+---+----+---+----+
After the insertInto overwrite, only the year=2018 partition is replaced; the year=2017 data is preserved:

+---+----+---+----+
| id|name|age|year|
+---+----+---+----+
|011| Sam| 21|2018|
|002| 李四| 18|2017|
+---+----+---+----+
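For comparison, once partitionOverwriteMode is dynamic, the same partition-level overwrite should also be expressible with INSERT OVERWRITE in SQL. A hedged sketch, not from the original post; the temporary view name temp_table_new is illustrative, and depending on how the table was created Hive's dynamic-partition settings may also need to be enabled:

// Register the new rows as a temporary view, then overwrite by dynamic partition
df1.createOrReplaceTempView("temp_table_new")
sql(
  """
    |INSERT OVERWRITE TABLE test_partition PARTITION (year)
    |SELECT id, name, age, year FROM temp_table_new
  """.stripMargin)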