package date_20250401

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Word-count job: reads a whitespace-tokenised text file, counts occurrences
 * of each word, and prints the (word, count) pairs sorted by count descending.
 *
 * The input path may be supplied as the first command-line argument; when no
 * argument is given it falls back to the original hard-coded sample file.
 */
object task2 {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local").setAppName("testRdd")
    val sc = new SparkContext(conf)
    try {
      // Backward-compatible generalization: optional CLI argument overrides
      // the previously hard-coded absolute path.
      val inputPath = args.headOption.getOrElse(
        "/Volumes/Data/04CodeData/gcc-project-25-2/spark-lesson/src/main/scala/date_20250401/data/word"
      )
      val data: RDD[String] = sc.textFile(inputPath)

      data.flatMap(_.split(" "))          // tokenize each line on single spaces
        .map(x => (x, 1))                 // pair every word with an initial count
        .reduceByKey(_ + _)               // sum counts per distinct word
        .sortBy(_._2, ascending = false)  // highest counts first (named arg for clarity)
        .collect()                        // NOTE: materialises all results on the driver;
                                          // fine for a lesson-sized file.
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails —
      // the original version leaked it on any exception.
      sc.stop()
    }
  }
}