// Known issue: this job currently only runs in local mode and cannot yet be submitted to the cluster.
import org.apache.spark.{SparkConf, SparkContext}

object Spark {
  def main(args: Array[String]): Unit = {
    // Create a Scala SparkContext
    val conf = new SparkConf().setAppName("wordCount").setMaster("local")
    // val conf = new SparkConf().setAppName("wordCount")  // for cluster submission: let spark-submit set the master
    val sc = new SparkContext(conf)
    // Read the input file from HDFS
    val input = sc.textFile("hdfs://192.168.1.200:9000/input01")
    // Split each line into words
    val words = input.flatMap(line => line.split(" "))
    // Count the occurrences of each word
    val counts = words.map(word => (word, 1)).reduceByKey(_ + _)
    // Write the results back to HDFS
    counts.saveAsTextFile("hdfs://192.168.1.200:9000/output05")
    sc.stop()
  }
}
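
// The cluster problem flagged above usually comes from hardcoding setMaster("local"),
// which overrides whatever master spark-submit passes in. Below is a minimal sketch of a
// cluster-friendly setup; the jar name, master URL, and object name are assumptions for
// illustration, not values taken from this project.
object SparkClusterSketch {
  def main(args: Array[String]): Unit = {
    // Leave the master out of the code; spark-submit supplies it, e.g. (hypothetical values):
    //   spark-submit --class Spark --master spark://192.168.1.200:7077 wordcount.jar
    // Fall back to local[*] only when no master was provided (e.g. when running from an IDE).
    val conf = new SparkConf().setAppName("wordCount")
    if (!conf.contains("spark.master")) conf.setMaster("local[*]")
    val sc = new SparkContext(conf)
    // ... same word-count transformations as above ...
    sc.stop()
  }
}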