[Study Notes]
Setting up a Spark Java 1.8 (lambda) environment in Eclipse on Win7: a WordCount hello-world example
Lambda expressions are a major new feature introduced in Java 8; using them makes our program code noticeably more concise.
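As a quick, generic illustration of that claim (not Spark-specific; the class name LambdaDemo is just a placeholder for this sketch): sorting a list with an anonymous Comparator versus a lambda shows how much boilerplate the lambda form removes.

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class LambdaDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("spark", "java", "lambda");
        // Java 7 style: an anonymous inner class just to compare two strings
        names.sort(new Comparator<String>() {
            @Override
            public int compare(String a, String b) {
                return a.compareTo(b);
            }
        });
        // Java 8 style: the same comparison as a one-line lambda
        names.sort((a, b) -> a.compareTo(b));
        System.out.println(names); // prints [java, lambda, spark]
    }
}

The WordCount example below applies the same idea to Spark's Java API.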
package com;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;
import java.util.Arrays;
import java.util.List;
public class WordCountLambda {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("WordCountLambda马克-to-win @ 马克java社区:").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        JavaRDD<String> lines = sc.textFile("E://temp//input//friend.txt");
        // Split each line into words. In Spark 2.x the flatMap lambda must return an Iterator
        // (Spark 1.x accepted an Iterable, i.e. Arrays.asList(...) without .iterator()).
        JavaRDD<String> words = lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator());
        // Pair each word with 1, then sum the counts per word.
        JavaPairRDD<String, Integer> wordAndOne = words.mapToPair(word -> new Tuple2<>(word, 1));
        JavaPairRDD<String, Integer> results = wordAndOne.reduceByKey((x, y) -> x + y);
        /* The following line also works; saveAsTextFile writes a directory of part-* files. */
        // results.saveAsTextFile("E://temp//input//friend1.txt");
        /* Expected console output:
           word:o1abc count:4
           word:45 count:1
           word:77 count:1 */
        results.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            @Override
            public void call(Tuple2<String, Integer> tuple) throws Exception {
                System.out.println("word:" + tuple._1 + " count:" + tuple._2);
            }
        });
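        // Note: foreach is an action that runs on the executors, so on a real cluster the println
        // output goes to the executors' logs rather than the driver console; in local mode, as here,
        // everything runs in one JVM and the output appears directly in the Eclipse console.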
        /* Expected console output:
           resultsPair is (o1abc,4)
           resultsPair is (45,1)
           resultsPair is (77,1) */
        // collect() brings the result pairs back to the driver as a local List.
        List<Tuple2<String, Integer>> resultsPairs = results.collect();
        for (Tuple2<String, Integer> resultsPair : resultsPairs) {
            System.out.println("resultsPair is " + resultsPair);
        }
        sc.close();
    }
}
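Since the whole point of the lambda style is conciseness, it is worth noting that the same word count can be chained into a single expression. The sketch below is only an illustration: it assumes the same sc and input path as above and Spark 2.x (where flatMap expects an Iterator), and uses explicit Tuple2 type arguments to keep type inference straightforward.

sc.textFile("E://temp//input//friend.txt")
  .flatMap(line -> Arrays.asList(line.split(" ")).iterator())
  .mapToPair(word -> new Tuple2<String, Integer>(word, 1))
  .reduceByKey((x, y) -> x + y)
  .foreach(tuple -> System.out.println("word:" + tuple._1 + " count:" + tuple._2));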
For more, see: https://blog.csdn.net/qq_44596980/article/details/93385254