
MapReduce Case Study (4): Computing the Total Employee Salary per City

2015-09-20 16:05
Design approach:

Map phase: emit the city as the key and each employee's salary as the value.

Reduce phase: group by key (city), iterate over the grouped values, and sum the salaries to get the total payroll for that city.
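The Employee parser used in the mapper comes from an earlier installment of this series. For illustration only, assume (hypothetically) that each input line is a record from the classic emp table joined with its department's city, in the comma-separated layout empno,ename,job,mgr,hiredate,sal,comm,deptno,loc. Under that assumption, a tiny input and the expected result would look like:

empno,ename,job,mgr,hiredate,sal,comm,deptno,loc
7369,SMITH,CLERK,7902,1980-12-17,800,,20,DALLAS
7499,ALLEN,SALESMAN,7698,1981-02-20,1600,300,30,CHICAGO
7521,WARD,SALESMAN,7698,1981-02-22,1250,500,30,CHICAGO

Expected output:

CHICAGO	2850
DALLAS	800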

package week06;

import java.io.IOException;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

// 4) Compute the total salary of the employees in each city
public class Emp_Test4 extends Configured implements Tool {

	/**
	 * Counter used to count malformed input records
	 */
	enum Counter {
		LINESKIP,
	}

	/**
	 * Map task: emits (city, salary) for each valid record
	 */
	public static class Map extends
			Mapper<LongWritable, Text, Text, IntWritable> {

		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {

			String line = value.toString(); // one line of the input file
			// Skip the header line of the input file
			if (line.contains("empno")) {
				return;
			}

			Employee emp = Employee.parser(line);
			if (emp.isValid()) {
				context.write(new Text(emp.getLoc()),
						new IntWritable(emp.getSal()));
			} else {
				context.getCounter(Counter.LINESKIP).increment(1); // malformed line: count it and skip
			}
		}
	}
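	// Note: map() above allocates a new Text and IntWritable for every record;
	// a common optimization (not applied here) is to reuse a single instance
	// of each and call set() before writing.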

	/**
	 * Reduce task: sums the salaries for each city
	 */
	public static class Reduce extends Reducer<Text, IntWritable, Text, Text> {

		@Override
		public void reduce(Text key, Iterable<IntWritable> values,
				Context context) throws IOException, InterruptedException {
			int sum = 0;
			for (IntWritable value : values) {
				sum += value.get();
			}
			context.write(key, new Text("" + sum));
		}
	}
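	// Note: summing is associative and commutative, so a combiner could cut
	// shuffle traffic. The Reduce class above cannot double as the combiner,
	// because it emits Text values while the map output values are
	// IntWritable; an IntWritable-summing reducer would be needed for that.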

	public int run(String[] args) throws Exception {
		Configuration conf = getConf();

		conf.set("mapred.job.tracker", "192.168.1.201:9001");
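		// NOTE: the input/output paths below are hard-coded, so any paths
		// passed on the command line are effectively ignored.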
		String[] ioArgs = new String[] { "emp_in", "emp_out_test4" };
		String[] otherArgs = new GenericOptionsParser(conf, ioArgs)
				.getRemainingArgs();
		if (otherArgs.length != 2) {
			System.err.println("Usage: Emp_Test4 <input path> <output path>");
			System.exit(2);
		}

		Job job = new Job(conf, "week06_test_04"); // job name
		job.setJarByClass(Emp_Test4.class); // jar containing this class
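		// (the new Job(conf, name) constructor above is deprecated in newer
		// Hadoop releases; Job.getInstance(conf, name) is the replacement)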

		FileInputFormat.addInputPath(job, new Path(otherArgs[0])); // input path
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[1])); // output path

		job.setMapperClass(Map.class); // use the Map class above as the map task
		job.setReducerClass(Reduce.class); // use the Reduce class above as the reduce task

		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(IntWritable.class);

		job.setOutputKeyClass(Text.class); // final output key type
		job.setOutputValueClass(Text.class); // final output value type

		job.waitForCompletion(true);
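		// (the "org.apache.hadoop.mapred.Task$Counter" group used below is the
		// Hadoop 1.x internal counter group; Hadoop 2+ exposes the same
		// counters via org.apache.hadoop.mapreduce.TaskCounter)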

		// Report job statistics
		System.out.println("Job name: " + job.getJobName());
		System.out.println("Job succeeded: " + (job.isSuccessful() ? "yes" : "no"));
		System.out.println("Input records: "
				+ job.getCounters()
						.findCounter("org.apache.hadoop.mapred.Task$Counter",
								"MAP_INPUT_RECORDS").getValue());
		System.out.println("Output records: "
				+ job.getCounters()
						.findCounter("org.apache.hadoop.mapred.Task$Counter",
								"MAP_OUTPUT_RECORDS").getValue());
		System.out.println("Skipped lines: "
				+ job.getCounters().findCounter(Counter.LINESKIP).getValue());
		return job.isSuccessful() ? 0 : 1;
	}

	/**
	 * Entry point: sets up and runs the MapReduce job
	 */
	public static void main(String[] args) throws Exception {
		// Record the start time
		DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
		Date start = new Date();

		// Run the job
		int res = ToolRunner.run(new Configuration(), new Emp_Test4(), args);

		// Report elapsed time
		Date end = new Date();
		float time = (float) ((end.getTime() - start.getTime()) / 60000.0);
		System.out.println("Job started: " + formatter.format(start));
		System.out.println("Job finished: " + formatter.format(end));
		System.out.println("Elapsed time: " + time + " minutes");

		System.exit(res);
	}
}
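The Employee helper class referenced above (parser, isValid, getLoc, getSal) was defined in an earlier installment of this series and is not shown here. Below is a minimal sketch of what it might look like, assuming the comma-separated layout described earlier (sal at index 5, loc at index 8); the real class may well differ.

package week06;

// Hypothetical sketch of the Employee helper used by Emp_Test4.
// The field positions are assumptions for illustration only.
public class Employee {
	private String loc; // city of the employee's department (assumed field 8)
	private int sal;    // salary (assumed field 5)
	private boolean valid;

	public static Employee parser(String line) {
		Employee emp = new Employee();
		try {
			String[] fields = line.split(",");
			emp.sal = Integer.parseInt(fields[5].trim());
			emp.loc = fields[8].trim();
			emp.valid = true;
		} catch (Exception e) {
			emp.valid = false; // malformed line: the mapper counts it as LINESKIP
		}
		return emp;
	}

	public boolean isValid() {
		return valid;
	}

	public String getLoc() {
		return loc;
	}

	public int getSal() {
		return sal;
	}
}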