SparkSQL DataFrame Data Type Conversion


import org.apache.spark.sql.types.{StructType, StructField, StringType, IntegerType, LongType}
import java.util.ArrayList
import org.apache.spark.sql._

// Schema matching the three fields in each Row below (see printSchema output)
val schema = StructType(Seq(
  StructField("name", StringType, nullable = true),
  StructField("age", IntegerType, nullable = true),
  StructField("phone", LongType, nullable = true)
))

val dataList = new ArrayList[Row]()
dataList.add(Row("ming", 20, 15552211521L))
dataList.add(Row("hong", 19, 13287994007L))
dataList.add(Row("zhi", 21, 15552211523L))

val df = sqlContext.createDataFrame(dataList, schema)

scala> df.printSchema
root
 |-- name: string (nullable = true)
 |-- age: integer (nullable = true)
 |-- phone: long (nullable = true)
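
As a side note, the same DataFrame can be built without an explicit Row list. The sketch below is an alternative, not the article's original approach, and it assumes sqlContext.implicits._ is available in the shell so that toDF can be called on a local Seq:

// Alternative construction: a local Seq of tuples plus toDF; column types are inferred
import sqlContext.implicits._

val df2 = Seq(
  ("ming", 20, 15552211521L),
  ("hong", 19, 13287994007L),
  ("zhi", 21, 15552211523L)
).toDF("name", "age", "phone")

df2.printSchema   // name: string, age: integer, phone: long (inferred numeric columns are non-nullable)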

// Change a column's data type
scala> import org.apache.spark.sql.functions.col
import org.apache.spark.sql.functions.col

scala> val df1 = df.withColumn("age", col("age").cast(StringType))
df1: org.apache.spark.sql.DataFrame = [name: string, age: string, phone: bigint]

scala> df1.printSchema
root
 |-- name: string (nullable = true)
 |-- age: string (nullable = true)
 |-- phone: long (nullable = true)
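
cast also accepts the target type as a plain string, and the same conversion can be written as a SQL expression with selectExpr. A minimal sketch of both variants, using the df created above (variable names here are illustrative, not from the original article):

// Variant 1: cast with a type-name string instead of a DataType object
val dfCastStr = df.withColumn("age", col("age").cast("string"))

// Variant 2: SQL-style CAST via selectExpr; the other columns are listed so they are kept
val dfCastExpr = df.selectExpr("name", "CAST(age AS STRING) AS age", "phone")

dfCastExpr.printSchema   // age is now string; name and phone are unchanged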
