MapReduce 求最大值

主要运用了 cleanup 函数:该函数在一个任务的全部 map 或 reduce 调用执行完毕之后再执行一次,因此适合在这里输出该任务累积得到的最大值。

static class MyMapper extends Mapper<LongWritable, Text, LongWritable, NullWritable>{
long max = Long.MIN_VALUE;
protected void map(LongWritable k1, Text v1, Context context) throws java.io.IOException ,InterruptedException {
final long temp = Long.parseLong(v1.toString());
if(temp>max){
max = temp;
}
};

protected void cleanup(org.apache.hadoop.mapreduce.Mapper<LongWritable,Text,LongWritable, NullWritable>.Context context) throws java.io.IOException ,InterruptedException {
context.write(new LongWritable(max), NullWritable.get());
};
}

static class MyReducer extends Reducer<LongWritable, NullWritable, LongWritable, NullWritable>{
long max = Long.MIN_VALUE;
protected void reduce(LongWritable k2, java.lang.Iterable<NullWritable> arg1, Context arg2) throws Exception {
final long temp = k2.get();
if(temp>max){
max = temp;
}
};

protected void cleanup(Context context) throws Exception {
context.write(new LongWritable(max), NullWritable.get());
};
}

---------------------------------

 1 package suanfa;
 2 
 3 import java.net.URI;
 4 
 5 
 6 import org.apache.hadoop.conf.Configuration;
 7 import org.apache.hadoop.fs.FileSystem;
 8 import org.apache.hadoop.fs.Path;
 9 import org.apache.hadoop.io.LongWritable;
10 import org.apache.hadoop.io.NullWritable;
11 import org.apache.hadoop.io.Text;
12 import org.apache.hadoop.mapreduce.Job;
13 import org.apache.hadoop.mapreduce.Mapper;
14 import org.apache.hadoop.mapreduce.Reducer;
15 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
16 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
17 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * @author 孟令军 (Meng Lingjun)
 */
21 public class TopKApp {
22     static final String INPUT_PATH = "hdfs://mlj:9000/seq";
23     static final String OUT_PATH = "hdfs://mlj:9000/seq_out";
24     
25     public static void main(String[] args) throws Exception {
26         Configuration conf = new Configuration();
27         final FileSystem fileSystem = FileSystem.get(new URI(INPUT_PATH), conf);
28         final Path outPath = new Path(OUT_PATH);
29         if(fileSystem.exists(outPath)){
30             fileSystem.delete(outPath, true);
31         }
32         
33         final Job job = new Job(conf , TopKApp.class.getSimpleName());
34         job.setJarByClass(TopKApp.class);
35         FileInputFormat.setInputPaths(job, INPUT_PATH);
36         job.setMapperClass(MyMapper.class);
37         job.setReducerClass(MyReducer.class);
38         job.setOutputKeyClass(LongWritable.class);
39         job.setOutputValueClass(NullWritable.class);
40         FileOutputFormat.setOutputPath(job, outPath);
41         job.waitForCompletion(true);
42     }
43     static class MyMapper extends Mapper<LongWritable, Text, LongWritable, NullWritable>{
44         long max = Long.MIN_VALUE;
45         protected void map(LongWritable k1, Text v1, Context context) throws java.io.IOException ,InterruptedException {
46             final long temp = Long.parseLong(v1.toString());
47             if(temp>max){
48                 max = temp;
49             }
50         };
51         
52         protected void cleanup(org.apache.hadoop.mapreduce.Mapper<LongWritable,Text,LongWritable, NullWritable>.Context context) throws java.io.IOException ,InterruptedException {
53             context.write(new LongWritable(max), NullWritable.get());
54         };
55     }
56     
57     static class MyReducer extends Reducer<LongWritable, NullWritable, LongWritable, NullWritable>{
58         long max = Long.MIN_VALUE;
59         protected void reduce(LongWritable k2, Iterable<NullWritable> arg1, Context arg2) throws Exception {
60             final long temp = k2.get();
61             if(temp>max){
62                 max = temp;
63             }
64         };
65         
66         protected void cleanup(Context context) throws java.io.IOException ,InterruptedException {
67             context.write(new LongWritable(max), NullWritable.get());
68         };
69     }        
70 }

 

posted @ 2015-05-05 17:33  孟想阳光  阅读(953)  评论(0)    收藏  举报