/**
 * Author: xiaoshubiao
 * Time: 2020/5/14 8:33
 *
 * Demonstrates sorting a JavaPairRDD by key with sortByKey.
 */
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.List;

public class union_test {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("union_test");
        JavaSparkContext sc = new JavaSparkContext(conf);

        List<String> list = Arrays.asList("a", "b", "c", "d", "e");
        // Distribute the list across 2 partitions.
        JavaRDD<String> parallelize = sc.parallelize(list, 2);
        // Pair each element with the count 1: (a,1), (b,1), ...
        JavaPairRDD<String, Integer> stringIntegerJavaPairRDD =
                parallelize.mapToPair(x -> new Tuple2<>(x, 1));

        // Ascending order (the default)
        stringIntegerJavaPairRDD.sortByKey().collect().forEach(System.out::println);
        // Descending order
        stringIntegerJavaPairRDD.sortByKey(false).collect().forEach(System.out::println);

        sc.stop();
    }
}
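
/*
 * A natural follow-up is sorting by value rather than by key. JavaPairRDD
 * has no sortByValue, so one common pattern is to swap key and value, call
 * sortByKey, and swap back. This is a minimal sketch only: the class name
 * sort_by_value_demo and the sample (word, count) pairs below are
 * illustrative assumptions, not part of the original example. It reuses the
 * imports at the top of this file.
 */
class sort_by_value_demo {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("sort_by_value_demo");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Hypothetical sample data: (word, count) pairs across 2 partitions.
        JavaPairRDD<String, Integer> pairs = sc.parallelizePairs(Arrays.asList(
                new Tuple2<>("a", 3), new Tuple2<>("b", 1), new Tuple2<>("c", 2)), 2);

        pairs.mapToPair(t -> new Tuple2<>(t._2(), t._1())) // swap to (count, word)
             .sortByKey(false)                             // sort descending by count
             .mapToPair(t -> new Tuple2<>(t._2(), t._1())) // swap back to (word, count)
             .collect()
             .forEach(System.out::println);                // prints (a,3) (c,2) (b,1)

        sc.stop();
    }
}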