# Shell: launch a local spark-shell from the Spark install directory.
# FIX: the original fused `cd` and `spark-shell` into one line, which a shell
# cannot parse — they must be separate commands (or joined with `&&`).
cd /usr/local/spark
./bin/spark-shell --master local

// Scala (inside spark-shell, where `sc` is the predefined SparkContext):
// classic word count over a local text file.
// FIX 1: `file:////` had a spurious extra slash — the canonical local-file
//        URI form is `file:///absolute/path`.
// FIX 2: the original read `.map(word=>(word,1))reduceByKey(...)` — the dot
//        before `reduceByKey` was missing, a syntax error.
val textFile = sc.textFile("file:///home/hadoop/Desktop/tem/1.txt")

// Split each line into words, pair each word with 1, then sum counts per word.
val wordCount = textFile
  .flatMap(line => line.split(" "))
  .map(word => (word, 1))
  .reduceByKey((a, b) => a + b)

wordCount.collect()        // materialize results on the driver
wordCount.foreach(println) // NOTE: on a cluster this prints on executors, not the driver
/**
 * Minimal Spark batch job: counts the lines of a README that contain the
 * letter 'a' and the letter 'b', then prints both counts.
 *
 * NOTE(review): assumes `org.apache.spark.{SparkConf, SparkContext}` are
 * imported at the top of the enclosing file — confirm, they are not visible here.
 *
 * FIXES vs. original:
 *  - `def main(args: Array[String]) { ... }` used deprecated procedure
 *    syntax; now declared `: Unit =` explicitly.
 *  - the SparkContext was never stopped, leaking the context; `sc.stop()`
 *    now runs in a `finally` block so it fires even if a job fails.
 */
object SimpleApp {
  def main(args: Array[String]): Unit = {
    // Should be some file on your system.
    val logFile = "file:///usr/local/spark/README.md"
    val conf = new SparkConf().setAppName("Simple Application")
    val sc = new SparkContext(conf)
    try {
      // cache() because the RDD is scanned twice (once per filter/count)
      val logData = sc.textFile(logFile, 2).cache()
      val numAs = logData.filter(line => line.contains("a")).count()
      val numBs = logData.filter(line => line.contains("b")).count()
      println("Lines with a: %s, Lines with b: %s".format(numAs, numBs))
    } finally {
      sc.stop() // always release the context, even on failure
    }
  }
}