Spark 2.0 以前版本：`val sparkConf = new SparkConf().setAppName("soyo"); val spark = new SparkContext(sparkConf)`。Spark 2.0 以后版本（上面的寫法仍然兼容）可以直接用 SparkSession：`val spark = SparkSession.builder.appName("soyo").getOrCreate()`，然后 `val tc = spark.sparkContext.parallelize(數據).cache()`。
import org.apache.spark.{SparkConf, SparkContext}
// Fix: SparkSession is used below but was never imported — the original did not compile.
import org.apache.spark.sql.SparkSession

/** Minimal Spark word-count example (Spark 2.x entry point via SparkSession). */
object text {
  def main(args: Array[String]): Unit = {
    // Pre-2.0 style (still supported, kept for reference):
    // val conf = new SparkConf().setAppName("測試").setMaster("local[2]")
    // val sc = new SparkContext(conf)
    // val file = sc.textFile("file:///home/soyo/桌面/spark編程測試數據/1.txt")

    // Spark 2.x: SparkSession is the unified entry point; it wraps a SparkContext.
    val spark = SparkSession.builder().getOrCreate()
    // Dataset-based alternative:
    // val file = spark.read.textFile("file:///home/soyo/桌面/spark編程測試數據/1.txt").rdd

    // Word count: split each line on spaces, pair each word with 1, sum per word.
    val file = spark.sparkContext.textFile("file:///home/soyo/桌面/spark編程測試數據/1.txt")
    val word = file
      .flatMap(lines => lines.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
    // NOTE: foreach(println) prints on the executors, not necessarily the driver,
    // when run on a cluster; fine for local mode.
    word.foreach(println)

    // Release cluster resources (the original leaked the session).
    spark.stop()
  }
}
以上幾種寫法在 Spark 2.2.0 下均可正常運行。