
Hadoop Learning (3)

2015-06-15 19:10
Compute the average score for each student. Data format: one record per line, consisting of the student name followed by a score.
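For illustration, an input file might look like the following (the names and scores here are made up for this walkthrough):

Alice 80
Bob 90
Alice 90
Bob 70
Alice 85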

Map function

package com.hadoop.myhadoop1;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class Map extends Mapper<LongWritable, Text, Text, LongWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Each input value is one line of the form "<student name> <score>".
        String line = value.toString();
        // TextInputFormat already delivers one line per call, so this outer
        // tokenizer normally yields a single token; it is kept for safety.
        StringTokenizer lineTokenizer = new StringTokenizer(line, "\n");
        while (lineTokenizer.hasMoreTokens()) {
            // Split the line into the name and the score fields.
            StringTokenizer fieldTokenizer = new StringTokenizer(lineTokenizer.nextToken());
            String name = fieldTokenizer.nextToken();
            String score = fieldTokenizer.nextToken();
            // Emit <name, score> so the framework groups all scores by student.
            context.write(new Text(name), new LongWritable(Long.parseLong(score)));
        }
    }
}
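With the sample input above, the map phase emits one <name, score> pair per line; the framework then sorts and groups them by name before the reduce phase:

<Alice, 80>
<Bob, 90>
<Alice, 90>
<Bob, 70>
<Alice, 85>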


Reduce function

package com.hadoop.myhadoop1;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class Reduce extends Reducer<Text, LongWritable, Text, LongWritable> {
    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Context context)
            throws IOException, InterruptedException {
        // Sum all scores for this student and count how many there are.
        // Use a long accumulator to avoid overflowing an int.
        long sum = 0;
        int count = 0;
        for (LongWritable score : values) {
            sum += score.get();
            count++;
        }
        // Integer division: the fractional part of the average is truncated.
        long average = sum / count;
        context.write(key, new LongWritable(average));
    }
}
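If the truncation from integer division matters, the reducer can emit a DoubleWritable instead. The sketch below is not part of the original post and the class name AverageReducer is made up; using it would also require setting job.setMapOutputValueClass(LongWritable.class) and job.setOutputValueClass(DoubleWritable.class) in the driver.

package com.hadoop.myhadoop1;

import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class AverageReducer extends Reducer<Text, LongWritable, Text, DoubleWritable> {
    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Context context)
            throws IOException, InterruptedException {
        long sum = 0;
        int count = 0;
        for (LongWritable score : values) {
            sum += score.get();
            count++;
        }
        // Floating-point division keeps the fractional part of the average.
        context.write(key, new DoubleWritable((double) sum / count));
    }
}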


Main function

package com.hadoop.myhadoop1;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class Process {
    public static void main(String[] args) throws IOException, ReflectiveOperationException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://192.168.1.108:9000");
        // Note: the hard-coded {"input", "output"} array means any arguments
        // passed on the command line are ignored.
        String[] ars = new String[] { "input", "output" };
        String[] otherArgs = new GenericOptionsParser(conf, ars).getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: Process <in> <out>");
            System.exit(2);
        }
        Job job = new Job(conf, "Score_Process");
        job.setJarByClass(Process.class);
        job.setMapperClass(Map.class);
        // Do not reuse Reduce as a combiner here: a combiner would emit
        // partial averages, and an average of averages is not the overall
        // average when students have different numbers of scores.
        job.setReducerClass(Reduce.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        FileInputFormat.setInputPaths(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

        // Exit with 0 on success, 1 on failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
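Assuming the three classes are packaged into a jar (the jar and file names below are made up) and the sample input file has been uploaded to the input directory in HDFS, the job can be launched with the standard hadoop jar command:

hadoop fs -put scores.txt input/
hadoop jar score-average.jar com.hadoop.myhadoop1.Process input output
hadoop fs -cat output/part-r-00000

With the sample data above, this should print the truncated integer averages:

Alice	85
Bob	80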
Tags: hadoop