import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class DelRep {

    // Mapper: emit each input line as the key with an empty value,
    // so identical lines collapse onto the same key during the shuffle.
    public static class Map extends Mapper<Object, Text, Text, Text> {
        private Text line = new Text();

        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            line.set(value);
            context.write(line, new Text(""));
        }
    }

    // Reducer: every distinct line arrives exactly once as a key;
    // write it out once and ignore the (empty) values.
    public static class Reduce extends Reducer<Text, Text, Text, Text> {
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            context.write(key, new Text(""));
        }
    }

    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        // Load the cluster configuration explicitly (path specific to this install).
        conf.addResource(new Path("/usr/hadoop-1.0.3/conf/core-site.xml"));
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

        Job job = new Job(conf, "DelRep");
        job.setJarByClass(DelRep.class);
        job.setMapperClass(Map.class);
        job.setCombinerClass(Reduce.class); // safe as a combiner: the reduce step is idempotent
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
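The idea is that the whole record becomes the map output key, the framework groups identical keys during the shuffle, and the reducer writes each key exactly once, which removes duplicates both within and across the input files. The job takes an input directory and an output directory, so it would typically be packaged into a jar and submitted with something like "hadoop jar DelRep.jar DelRep <input dir> <output dir>" (paths are placeholders).

A minimal alternative sketch, not from the original post: the same deduplication job using NullWritable instead of empty Text values, which avoids serializing an empty string for every record. The class name DelRepNull is illustrative.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class DelRepNull {

    public static class DedupMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // The whole record is the key; the value carries no data.
            context.write(value, NullWritable.get());
        }
    }

    public static class DedupReducer extends Reducer<Text, NullWritable, Text, NullWritable> {
        @Override
        protected void reduce(Text key, Iterable<NullWritable> values, Context context)
                throws IOException, InterruptedException {
            // One output line per distinct record.
            context.write(key, NullWritable.get());
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf, "DelRepNull");
        job.setJarByClass(DelRepNull.class);
        job.setMapperClass(DedupMapper.class);
        job.setCombinerClass(DedupReducer.class); // drops duplicates before the shuffle
        job.setReducerClass(DedupReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}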
File1:
2006-6-9 a
2006-6-10 b
2006-6-11 c
2006-6-12 d
2006-6-13 a
2006-6-14 b
2006-6-15 c
2006-6-11 c
File2:
2006-6-9 d
2006-6-10 a
2006-6-11 b
2006-6-12 d
2006-6-13 a
2006-6-14 c
2006-6-15 d
2006-6-11 c
Result:
2006-6-10 a
2006-6-10 b
2006-6-11 b
2006-6-11 c
2006-6-12 d
2006-6-13 a
2006-6-14 b
2006-6-14 c
2006-6-15 c
2006-6-15 d
2006-6-9 a
2006-6-9 d
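The result is ordered by the Text key's lexicographic byte order, which is why the "2006-6-9" records come last: "2006-6-1..." sorts before "2006-6-9" as a string. The same set of lines can be reproduced locally with plain Java as a sanity check. This is a sketch, not part of the job; the file names file1.txt and file2.txt are assumed.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.TreeSet;

// Local check: a TreeSet keeps one copy of each record and iterates in
// lexicographic order, mirroring the job's deduplicated, sorted output.
public class DedupCheck {
    public static void main(String[] args) throws IOException {
        TreeSet<String> distinct = new TreeSet<>();
        for (String file : new String[] {"file1.txt", "file2.txt"}) { // assumed file names
            for (String line : Files.readAllLines(Paths.get(file))) {
                if (!line.isEmpty()) {
                    distinct.add(line);
                }
            }
        }
        // Prints "2006-6-10 a" through "2006-6-9 d", matching the result above.
        distinct.forEach(System.out::println);
    }
}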