SparkSQL DataFrame数据类型转换


import org.apache.spark.sql.types.{StructType, StructField, StringType, IntegerType, LongType}
import java.util.ArrayList
import org.apache.spark.sql._

// Build the rows in a java.util.ArrayList[Row]; createDataFrame accepts a Java list.
// (Fixed: the original wrote `new util.ArrayList[Row]()`, which does not compile with
// `import java.util.ArrayList` — only the simple name `ArrayList` is in scope.)
val dataList = new ArrayList[Row]()
dataList.add(Row("ming", 20, 15552211521L))
dataList.add(Row("hong", 19, 13287994007L))
dataList.add(Row("zhi", 21, 15552211523L))

// Schema matching the Row values above: (name: String, age: Int, phone: Long).
// (Fixed: `schema` was referenced below but never defined in the original snippet.)
val schema = StructType(List(
  StructField("name", StringType, nullable = true),
  StructField("age", IntegerType, nullable = true),
  StructField("phone", LongType, nullable = true)
))

// createDataFrame(java.util.List[Row], StructType) builds the typed DataFrame.
val df = sqlContext.createDataFrame(dataList, schema)

scala> df.printSchema
root
 |-- name: string (nullable = true)
 |-- age: integer (nullable = true)
 |-- phone: long (nullable = true)

// 修改数据类型(注意:col 需要先 import org.apache.spark.sql.functions.col)
scala> val df1 = df.withColumn("age",col("age").cast(StringType))
df1: org.apache.spark.sql.DataFrame = [name: string, age: string, phone: bigint]

scala> df1.printSchema
root
 |-- name: string (nullable = true)
 |-- age: string (nullable = true)
 |-- phone: long (nullable = true)

 


免责声明!

本站转载的文章为个人学习借鉴使用,本站对版权不负任何法律责任。如果侵犯了您的隐私权益,请联系本站邮箱yoyou2525@163.com删除。



 
粤ICP备18138465号  © 2018-2025 CODEPRJ.COM