您的位置:首页 > 运维架构

hadoop个人理解

2014-10-10 15:32 99 查看


 

2 hadoop 的三种运行方式

Local:

需要修改 hadoop-site.xml 中的 JAVA_HOME

hadoop-default.xml 是主要配置文件,不需要更改

命令:

hadoop/bin/hadoop demo.hadoop.HadoopGrep log文件所在目录 任意的输出目录 grep的字符串

3 重写例子

package com.hadoop.example;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.apache.catalina.filters.AddDefaultCharsetFilter;

import com.sun.org.apache.bcel.internal.generic.NEW;

/**
 * Toy walkthrough of a Hadoop-style job submission: wires up the mock
 * MapReduce components and hands one Job to the JobTracker.
 */
public class HReal {

    public static void main(String[] args) {
        // Entry point intentionally left empty in this example.
    }

    /** Builds the mock components and submits a single job. */
    public void work() {
        JobConf conf = null;
        Mapper mapper = new Mapper();
        Reducer reducer = new Reducer();
        InputFormat inputFormat = new InputFormat();
        OutputFromat outputFromat = new OutputFromat();
        String inpathString = "";
        String outpathString = "";

        JobTracker jobTracker = new JobTracker();
        Job job = new Job();
        jobTracker.addJob(job);
        // BUG FIX: original wrote `if (job.flg = true)` — an assignment that
        // forced the flag on and made the condition always true. Test it instead.
        if (job.flg) {
            // placeholder branch — nothing to do yet
        }
    }
}

/**
 * Mock of Hadoop's Mapper: remembers the last key/value pair it was given
 * and hands back a fresh SequenceFile.
 */
class Mapper {
    // last key/value pair recorded by deal(String, String)
    private String kString;
    private String vString;

    /** Placeholder overload taking a Class token; currently a no-op. */
    public void deal(Class c) {
        // intentionally empty
    }

    /** Private placeholder overload; currently a no-op. */
    private void deal(String s) {
        // intentionally empty
    }

    /** Records the pair and returns a new (empty) SequenceFile. */
    public SequenceFile deal(String kString, String vString) {
        this.vString = vString;
        this.kString = kString;
        return new SequenceFile();
    }
}
// Placeholder for Hadoop's SequenceFile (key/value container) — no behavior yet.
class SequenceFile{

}
/** Mock Reducer: holds intermediate and final strings; deal() yields the result. */
class Reducer {
    private String resultString;  // final reduce output (never assigned here, so null)
    private String middleString;  // intermediate-data placeholder

    /** Returns the accumulated result (null until something sets it). */
    public String deal() {
        String out = resultString;
        return out;
    }
}
/**
 * Mock InputFormat: creates record readers, parses lines according to an
 * optional type token, and splits input files.
 */
class InputFormat {

    /** Factory for a fresh RecordReader. */
    public RecordReader newReader() {
        return new RecordReader();
    }

    /** Identity parse: the raw line is returned unchanged. */
    public Object format(String line) {
        return line;
    }

    /**
     * Parses a line by type token: a String token yields the line itself,
     * a Class token yields the line's Class object; anything else (including
     * null) falls back to the raw line.
     */
    public Object format(String line, Object type) {
        if (type instanceof String) {
            return line;
        } else if (type instanceof Class) {
            return line.getClass();
        }
        return line;
    }

    /** Splits the file at the given path (split logic not implemented yet). */
    public FileSplist getSplits(String pathfile) {
        File file = new File(pathfile);
        // split-file handling would go here
        return new FileSplist();
    }
}
// Placeholder for an input split (note: name is a typo of "FileSplit", kept for compatibility).
class FileSplist{

}
// Mock output format (name is a typo of "OutputFormat", kept for compatibility).
class OutputFromat{
// Writes the final result string (not implemented yet).
public void write(String result){

}
}
// Placeholder for HDFS's NameNode (filesystem metadata master) — no behavior yet.
class NameNode{

}
// Placeholder for HDFS's DataNode (block storage worker) — no behavior yet.
class DataNode{

}
// Cluster master: pairs the job scheduler with the filesystem namespace.
class Master{
private JobTracker jobTracker;
private NameNode nameNode;
}
/** Mock JobTracker: accepts jobs and steps through them by index. */
class JobTracker {
    private MapReduce mapReduce;
    private InputFormat inputFormat;
    List<Job> jobs = new ArrayList<Job>();

    /** Queues a job for execution. */
    public void addJob(Job job) {
        jobs.add(job);
    }

    /**
     * Looks up the job at the current index.
     * BUG FIX: the original called jobs.get(getCurrentFlg()) unconditionally,
     * throwing IndexOutOfBoundsException whenever fewer than two jobs were
     * queued (getCurrentFlg() is hard-coded to 1). Now bounds-checked.
     */
    public void nextJob() {
        int idx = this.getCurrentFlg();
        if (idx >= 0 && idx < jobs.size()) {
            jobs.get(idx);
        }
    }

    /** Index of the "current" job — hard-coded placeholder. */
    public int getCurrentFlg() {
        return 1;
    }

    /** Reports current state (not implemented yet). */
    public void currState() {

    }
}
// Placeholder for the MapReduce engine — no behavior yet.
class MapReduce{

}
// Placeholder for the job configuration object — no behavior yet.
class JobConf{

}
/** Mock Job: running it flips a completion flag shared by all Job instances. */
class Job {
    // static, so it is shared across every Job — flips to true once any job runs
    static boolean flg = false;

    /** Simulates doing the work, then marks completion. */
    public void run() {
        System.out.println("deal");
        Job.flg = true;
    }
}
// Mock map-side task: holds the input splits assigned to one mapper.
class MapperTask{
// splits queued for this task
List<FileSplist> lst = new ArrayList<FileSplist>();
InputFormat inputFormat;
// Queues one split for processing.
public void addTask(FileSplist fileSplist){
lst.add(fileSplist);
}

// Reads the queued splits (not implemented yet).
public void read(){
//		lst.get(0)
}
}
/** Mock task queue holding pending MapperTasks. */
class Queue {
    // FIX: parameterized list instead of a raw type — the queue only ever
    // holds MapperTask instances, so make the compiler enforce it.
    List<MapperTask> lst = new ArrayList<MapperTask>();

    /** Enqueues a mapper task. */
    public void addMapperTask(MapperTask mapperTask) {
        lst.add(mapperTask);
    }
}
/** Mock TaskTracker: runs tasks and periodically reports progress upstream. */
class TaskTracker {
    int taskCount = 0;
    JobTracker jobTracker;

    /** Asks the tracker to advance to the next job. */
    public void nextMapReduce() {
        jobTracker.nextJob();
    }

    /** Task-processing placeholder (not implemented yet). */
    public void deal() {

    }

    // Periodically report task progress to the JobTracker.
    public void schele() {
        System.out.println("当前任务完成了多少了");
        jobTracker.currState();
        if (taskCount == 3) {
            nextMapReduce();
        }
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag so callers can still observe the
            // interruption — swallowing it loses the signal entirely.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }
}
// Placeholder for a record reader over an input split — no behavior yet.
class RecordReader{

}
/** Mock reduce-side task: sort, merge, reduce, then write the result out. */
class ReducerTask {
    Reducer reducer;
    OutputFromat outputFromat;

    /** Sort phase (not implemented yet). */
    public void sort() {
    }

    /** Merge phase (not implemented yet). */
    public void merge() {
    }

    /** Runs the full reduce pipeline: sort -> merge -> reduce -> write. */
    public void deal() {
        sort();
        merge();
        outputFromat.write(reducer.deal());
    }
}


 

 
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: