MapReduce Programming in Practice (Part 2)
        conf.setReducerClass(MaxTemperatureReduce.class);

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(IntWritable.class);

        JobClient.runJob(conf);
    }
}
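
The listing above picks up in the middle of the driver class for the interface-style (old API) example, so for context here is a minimal sketch of what the complete JobConf/JobClient driver typically looks like. The class name MaxTemperature, the job name, and reading the input/output paths from the command line are assumptions, not part of the original listing.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

// Sketch of the full old-API driver (class name assumed)
public class MaxTemperature {
    public static void main(String[] args) throws IOException {
        JobConf conf = new JobConf(MaxTemperature.class);
        conf.setJobName("Max temperature"); // assumed job name

        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        conf.setMapperClass(MaxTemperatureMapper.class);
        conf.setReducerClass(MaxTemperatureReduce.class);

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(IntWritable.class);

        JobClient.runJob(conf);
    }
}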


class MaxTemperatureMapper extends MapReduceBase implements
        Mapper<LongWritable, Text, Text, IntWritable> {
    private static final int MISSING = 9999;

    public void map(LongWritable key, Text value,
            OutputCollector<Text, IntWritable> output, Reporter reporter)
            throws IOException {
        String line = value.toString();
        // The year occupies characters 15-18 of the fixed-width record
        String year = line.substring(15, 19);
        int airTemperature;
        if (line.charAt(87) == '+') {
            // skip the leading plus sign, which parseInt would reject
            airTemperature = Integer.parseInt(line.substring(88, 92));
        } else {
            airTemperature = Integer.parseInt(line.substring(87, 92));
        }
        String quality = line.substring(92, 93);
        if (airTemperature != MISSING && quality.matches("[01459]")) {
            output.collect(new Text(year), new IntWritable(airTemperature));
        }
    }
}


class MaxTemperatureReduce extends MapReduceBase implements
        Reducer<Text, IntWritable, Text, IntWritable> {
    public void reduce(Text key, Iterator<IntWritable> values,
            OutputCollector<Text, IntWritable> output, Reporter reporter)
            throws IOException {
        int maxValue = Integer.MIN_VALUE;
        while (values.hasNext()) {
            maxValue = Math.max(maxValue, values.next().get());
        }
        // Emit the highest temperature seen for this year
        output.collect(key, new IntWritable(maxValue));
    }
}


The abstract-class approach (the new MapReduce API)


import java.io.IOException;


import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;


public class NewMaxTemperature {

    public static void main(String[] args) throws Exception {

        Job job = new Job();
        job.setJarByClass(NewMaxTemperature.class);

        // FileInputFormat.setInputPaths(job, new Path(args[0]));
        // FileOutputFormat.setOutputPath(job, new Path(args[1]));

        FileInputFormat.setInputPaths(job, new Path("/hadooptemp/input/2"));
        FileOutputFormat.setOutputPath(job, new Path("/hadooptemp/output"));

        job.setMapperClass(NewMaxTemperatureMapper.class);
        job.setReducerClass(NewMaxTemperatureReduce.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
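
The input and output paths are hard-coded here for testing, with the args-based lines left commented out. If the command-line version is restored, a small argument check before submitting the job is a common safeguard; the usage message below is only an illustration, not part of the original:

        if (args.length != 2) {
            System.err.println("Usage: NewMaxTemperature <input path> <output path>");
            System.exit(-1);
        }
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));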


class NewMaxTemperatureMapper extends
        Mapper<LongWritable, Text, Text, IntWritable> {
    private static final int MISSING = 9999;

    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String line = value.toString();
        String year = line.substring(15, 19);
        int airTemperature;
        if (line.charAt(87) == '+') {
            airTemperature = Integer.parseInt(line.substring(88, 92));
        } else {
            airTemperature = Integer.parseInt(line.substring(87, 92));
        }
        String quality = line.substring(92, 93);
        if (airTemperature != MISSING && quality.matches("[01459]")) {
            // In the new API, output is written through the Context
            context.write(new Text(year), new IntWritable(airTemperature));
        }
    }
}


class NewMaxTemperatureReduce extends
        Reducer<Text, IntWritable, Text, IntWritable> {
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int maxValue = Integer.MIN_VALUE;
        // The new API passes the values as an Iterable rather than an Iterator
        for (IntWritable value : values) {
            maxValue = Math.max(maxValue, value.get());
        }
        context.write(key, new IntWritable(maxValue));
    }
}
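
Both mappers rely on the fixed-width layout of the NCDC weather records: characters 15-18 hold the year, character 87 the temperature sign, characters 88-91 the temperature (in tenths of a degree Celsius in the NCDC format), and character 92 the quality code. The standalone sketch below builds a synthetic record (not real NCDC data) just to show what those offsets pick out:

// Hypothetical standalone demo of the parsing logic used in map();
// the record is synthetic padding, not a real NCDC line.
public class RecordParseDemo {
    public static void main(String[] args) {
        StringBuilder record = new StringBuilder();
        for (int i = 0; i < 93; i++) {
            record.append('0'); // filler
        }
        record.replace(15, 19, "1950");   // year
        record.replace(87, 92, "+0022");  // sign plus temperature in tenths of a degree
        record.setCharAt(92, '1');        // quality code
        String line = record.toString();

        String year = line.substring(15, 19);
        int airTemperature;
        if (line.charAt(87) == '+') {
            airTemperature = Integer.parseInt(line.substring(88, 92));
        } else {
            airTemperature = Integer.parseInt(line.substring(87, 92));
        }
        String quality = line.substring(92, 93);
        System.out.println(year + " " + airTemperature + " quality=" + quality);
        // prints: 1950 22 quality=1
    }
}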

