
Hadoop 1.x MapReduce Template Class

2016-04-18 22:33
package org.dragon.hadoop.mr;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MapReduceModule extends Configured implements Tool {

    // default Mapper: the identity mapper, which forwards every (offset, line) pair unchanged
    public static class ModuleMapper extends Mapper<LongWritable, Text, LongWritable, Text> {

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // called once per map task before the first map() call; per-task initialization goes here
            super.setup(context);
        }

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // the default implementation simply emits the input pair; replace with real map logic
            super.map(key, value, context);
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            // called once per map task after the last map() call; release resources here
            super.cleanup(context);
        }
    }

    // default Reducer: the identity reducer, which writes every value out with its key
    public static class ModuleReducer extends Reducer<LongWritable, Text, LongWritable, Text> {

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // called once per reduce task before the first reduce() call
            super.setup(context);
        }

        @Override
        protected void reduce(LongWritable key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // the default implementation writes each value unchanged; replace with real reduce logic
            super.reduce(key, values, context);
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            // called once per reduce task after the last reduce() call
            super.cleanup(context);
        }
    }

    // Driver
    @Override
    public int run(String[] args) throws Exception {

        /*args = new String[]{
            "hdfs://hadoop-master.dragon.org:9000/opt/data/test/input/simple_file.txt",
            "hdfs://hadoop-master.dragon.org:9000/opt/data/test/output7/"
        };*/

        // 1. conf: use the Configuration prepared by ToolRunner so that -D generic options take effect
        Configuration conf = getConf();

        // 2. create job
        Job job = new Job(conf, MapReduceModule.class.getSimpleName());

        // 3. set the jar that contains this driver class
        job.setJarByClass(MapReduceModule.class);

        // 4. set input format
        job.setInputFormatClass(TextInputFormat.class);

        // 5. set input path
        FileInputFormat.addInputPath(job, new Path(args[0]));

        // 6. set mapper
        job.setMapperClass(ModuleMapper.class);

        // 7. set map output key/value class
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);

        // 8. set partitioner
        job.setPartitionerClass(HashPartitioner.class);

        // 9. set reduce number
        job.setNumReduceTasks(1);

        // 10. set sort comparator class
        // job.setSortComparatorClass(LongWritable.Comparator.class);

        // 11. set group comparator class
        // job.setGroupingComparatorClass(LongWritable.Comparator.class);

        // 12. set combiner class
        // job.setCombinerClass(null);

        // 13. set reducer class
        job.setReducerClass(ModuleReducer.class);

        // 14. set output format
        job.setOutputFormatClass(TextOutputFormat.class);

        // 15. set job output key/value class
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);

        // 16. set job output path
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // 17. submit the job and wait for completion
        boolean isSuccess = job.waitForCompletion(true);

        return isSuccess ? 0 : 1;
    }

    // client
    public static void main(String[] args) throws Exception {
        int status = ToolRunner.run(new MapReduceModule(), args);

        // exit
        System.exit(status);
    }
}
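
To show how the template is meant to be filled in, here is a minimal word-count style sketch: the Mapper and Reducer carry real logic while the driver wiring stays the same as above. The class names (WordCountJob, WordMapper, WordReducer) and the word-count logic are illustrative assumptions, not part of the original template.

package org.dragon.hadoop.mr;

import java.io.IOException;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

// Hypothetical example: a word-count job built on the same skeleton as MapReduceModule.
public class WordCountJob extends Configured implements Tool {

    // Mapper: split each input line into words and emit (word, 1)
    public static class WordMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
        private final Text word = new Text();
        private final LongWritable one = new LongWritable(1);

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            for (String token : value.toString().split("\\s+")) {
                if (!token.isEmpty()) {
                    word.set(token);
                    context.write(word, one);
                }
            }
        }
    }

    // Reducer: sum the counts emitted for each word
    public static class WordReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
        @Override
        protected void reduce(Text key, Iterable<LongWritable> values, Context context)
                throws IOException, InterruptedException {
            long sum = 0;
            for (LongWritable count : values) {
                sum += count.get();
            }
            context.write(key, new LongWritable(sum));
        }
    }

    // Driver: the same wiring as the template, with the key/value classes adjusted;
    // TextInputFormat/TextOutputFormat are the defaults, so they are not set explicitly here
    @Override
    public int run(String[] args) throws Exception {
        Job job = new Job(getConf(), WordCountJob.class.getSimpleName());
        job.setJarByClass(WordCountJob.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        job.setMapperClass(WordMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setReducerClass(WordReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new WordCountJob(), args));
    }
}

Packaged into a jar, such a job is submitted with the standard hadoop jar command, passing the HDFS input and output paths as the two program arguments, for example (the jar name is a placeholder): hadoop jar wordcount.jar org.dragon.hadoop.mr.WordCountJob <input> <output>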