您的位置:首页 > 运维架构

hadoop-wordcount demo 分类: hadoop 2015-04-19 15:51 47人阅读 评论(0) 收藏

2015-04-19 15:51 197 查看
package com.billstudy.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Word-count MapReduce job: counts occurrences of each whitespace-separated
 * word in the files under {@code INPUT_PATH} and writes (word, total) pairs
 * to {@code OUTPUT_PATH}.
 *
 * @author Bill
 * @since V1.0 2015-04-19
 */
public class WordCount {

    private static final Path INPUT_PATH = new Path("hdfs://hadoop01:9000/word_count_in/");

    private static final Path OUTPUT_PATH = new Path("hdfs://hadoop01:9000/word_count_out4/");

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();

        // 1. Create the job with a human-readable name.
        // (On Hadoop 2+ prefer Job.getInstance(configuration, name); the
        // constructor is kept here for compatibility with older clusters.)
        Job job = new Job(configuration, WordCount.class.getSimpleName());

        // Ship the jar containing this class to the cluster. Without this,
        // task JVMs cannot load WordCountMapper/WordCountReduce and the job
        // fails with ClassNotFoundException when run outside local mode.
        job.setJarByClass(WordCount.class);

        // 2. Input files.
        FileInputFormat.setInputPaths(job, INPUT_PATH);

        // 3. Output directory (must not exist yet; Hadoop refuses to overwrite).
        FileOutputFormat.setOutputPath(job, OUTPUT_PATH);

        // 4. Mapper.
        job.setMapperClass(WordCountMapper.class);

        // 5. Reducer. Summation is associative and commutative, so the same
        // class doubles as a combiner to reduce shuffle traffic.
        job.setCombinerClass(WordCountReduce.class);
        job.setReducerClass(WordCountReduce.class);

        // 6. Reduce output types (also used for the map output here, since
        // no separate map output types are set).
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        int exitCode = job.waitForCompletion(true) ? 0 : 1;
        System.err.println("process finished, exit code is : " + exitCode);
        System.exit(exitCode);
    }

    /**
     * Tokenizes each input line on whitespace and emits (word, 1) pairs.
     * Input key is the byte offset of the line; input value is the line text.
     */
    static class WordCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

        /** Every occurrence counts once. */
        private static final LongWritable ONE = new LongWritable(1L);

        // Reused across map() calls to avoid allocating a Text per word.
        private final Text outKey = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split on runs of whitespace; the previous single-space split
            // emitted empty "words" for consecutive spaces, skewing counts.
            for (String word : value.toString().split("\\s+")) {
                if (word.isEmpty()) {
                    continue; // leading whitespace yields one empty token
                }
                outKey.set(word);
                context.write(outKey, ONE);
            }
        }
    }

    /**
     * Sums the per-occurrence counts for each word and emits (word, total).
     * Also safe to use as a combiner (see job setup in {@code main}).
     */
    static class WordCountReduce extends Reducer<Text, LongWritable, Text, LongWritable> {

        @Override
        protected void reduce(Text key, Iterable<LongWritable> values, Context context)
                throws IOException, InterruptedException {
            long total = 0L;
            for (LongWritable count : values) {
                total += count.get();
            }
            context.write(key, new LongWritable(total));
        }
    }

}
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: 
相关文章推荐