• java spark转换算子join、leftOuterJoin、rightOuterJoin、fullOuterJoin


    /**
     * # _*_ coding:utf-8 _*_
     * # Author:xiaoshubiao
     * # Time : 2020/5/14 8:33
     **/
    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.api.java.function.Function2;
    import scala.Tuple2;
    
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    
    /**
     * Demonstrates the four pair-RDD join transformations in Spark's Java API:
     * {@code join} (inner), {@code leftOuterJoin}, {@code rightOuterJoin} and
     * {@code fullOuterJoin}, printing each result to stdout.
     */
    public class union_test {
        public static void main(String[] args) {
            // Run Spark locally, using all available cores.
            SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("union_test");
            JavaSparkContext sc = new JavaSparkContext(conf);
            try {
                List<String> list = Arrays.asList("a", "b", "c", "d", "e");
                List<String> list2 = Arrays.asList("a", "b", "c", "f", "h");
                JavaRDD<String> parallelize = sc.parallelize(list, 2);
                JavaRDD<String> parallelize2 = sc.parallelize(list2, 2);
                // Parameterized JavaPairRDD<String, Integer> instead of raw types:
                // keeps the pipeline type-safe and removes unchecked warnings.
                JavaPairRDD<String, Integer> javaPairRDD = parallelize.mapToPair(x -> new Tuple2<>(x, 1));
                JavaPairRDD<String, Integer> javaPairRDD1 = parallelize2.mapToPair(x -> new Tuple2<>(x, 2));
                // join: inner join — only keys present in BOTH RDDs (a, b, c).
                javaPairRDD.join(javaPairRDD1).collect().forEach(x -> System.out.println("join" + x));
                // leftOuterJoin: every key from the left RDD; the right value is an Optional.
                javaPairRDD.leftOuterJoin(javaPairRDD1).collect().forEach(x -> System.out.println("leftOuterJoin" + x));
                // rightOuterJoin: every key from the right RDD; the left value is an Optional.
                javaPairRDD.rightOuterJoin(javaPairRDD1).collect().forEach(x -> System.out.println("rightOuterJoin" + x));
                // fullOuterJoin: every key from either RDD; both values are Optionals.
                javaPairRDD.fullOuterJoin(javaPairRDD1).collect().forEach(x -> System.out.println("fullOuterJoin" + x));
            } finally {
                // Always release the Spark context, even if a job above throws.
                sc.stop();
            }
        }
    }
  • 相关阅读:
    java 之 File类
    java 之 内部类
    java 之 泛型
    如何生成随机码
    sql中getdate()&convert的使用
    sql中Distinct&Count的用法
    C#中如何使用正则表达式
    Sql按照字段分组,选取其他字段最值所在的行记录
    为什么在属性中设置private set
    基本Sql语句汇总
  • 原文地址:https://www.cnblogs.com/7749ha/p/12888272.html
Copyright © 2020-2023  润新知