
Implementing the kNN Algorithm with MapReduce

Algorithm workflow

(1) Distribute the training set to every map node as a shared file via the distributed cache.

(2) Each map node runs a Mapper<LongWritable, Text, LongWritable, ListWritable<DoubleWritable>>. The LongWritable key is the byte offset of the test line in the input file, which guarantees that each test instance has a unique key; the ListWritable<DoubleWritable> value holds the labels of the k nearest training instances.

(3) The reduce side then decides each test instance's class by majority vote over those k labels. For example, with k = 3, if the nearest labels are {1, 1, 2}, the predicted class is 1.

package knn;

public class Distance {

    public static double EuclideanDistance(double[] a, double[] b)
            throws Exception {
        if (a.length != b.length)
            throw new Exception("size not compatible!");
        double sum = 0.0;
        for (int i = 0; i < a.length; i++) {
            sum += Math.pow(a[i] - b[i], 2);
        }
        return Math.sqrt(sum);
    }
}
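As a quick sanity check, the distance between the attribute vectors of the first two training instances listed at the end of the post works out to about 0.141. The demo class below is illustrative only and not part of the original code:

package knn;

// Illustrative only: a quick check of Distance.EuclideanDistance.
public class DistanceDemo {
    public static void main(String[] args) throws Exception {
        double[] a = { 1.0, 2.0, 3.0 };
        double[] b = { 1.0, 2.1, 3.1 };
        // sqrt(0.0^2 + 0.1^2 + 0.1^2) ≈ 0.1414
        System.out.println(Distance.EuclideanDistance(a, b));
    }
}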

package knn;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

/**
 * KNearestNeighbour classifier. Each instance in the training set is of the
 * form "a1 a2 ... an l", where l is the label, and each instance in the
 * predict set is of the form "a1 a2 ... an -1", where -1 is a placeholder for
 * the label we want to predict. The algorithm assumes the training set is
 * small enough to be loaded into memory on every map node, while the predict
 * set may be large. It also assumes all test instances live in a single file,
 * so the byte offset of each line is unique to each instance.
 */
public class KNearestNeighbour {

    public static class KNNMap extends
            Mapper<LongWritable, Text, LongWritable, ListWritable<DoubleWritable>> {

        private int k;
        private ArrayList<Instance> trainSet;

        @Override
        protected void setup(Context context) throws IOException,
                InterruptedException {
            k = context.getConfiguration().getInt("k", 1);
            trainSet = new ArrayList<Instance>();

            // load all training instances from the distributed-cache files
            Path[] trainFile = DistributedCache.getLocalCacheFiles(context
                    .getConfiguration());
            for (int i = 0; i < trainFile.length; i++) {
                BufferedReader br = new BufferedReader(new FileReader(
                        trainFile[i].toString()));
                String line;
                while ((line = br.readLine()) != null) {
                    trainSet.add(new Instance(line));
                }
                br.close();
            }
        }

        /**
         * Find the labels of the k nearest training instances, put them in a
         * ListWritable, and emit <lineOffset, labelList>.
         */
        @Override
        public void map(LongWritable textIndex, Text textLine, Context context)
                throws IOException, InterruptedException {
            // distance holds the k smallest distances found so far;
            // trainLable holds the corresponding labels
            ArrayList<Double> distance = new ArrayList<Double>(k);
            ArrayList<DoubleWritable> trainLable = new ArrayList<DoubleWritable>(k);
            for (int i = 0; i < k; i++) {
                distance.add(Double.MAX_VALUE);
                trainLable.add(new DoubleWritable(-1.0));
            }

            ListWritable<DoubleWritable> lables =
                    new ListWritable<DoubleWritable>(DoubleWritable.class);
            Instance testInstance = new Instance(textLine.toString());
            for (int i = 0; i < trainSet.size(); i++) {
                try {
                    double dis = Distance.EuclideanDistance(
                            trainSet.get(i).getAtrributeValue(),
                            testInstance.getAtrributeValue());
                    // replace the current farthest of the k neighbours if this
                    // training instance is closer
                    int index = indexOfMax(distance);
                    if (dis < distance.get(index)) {
                        distance.remove(index);
                        trainLable.remove(index);
                        distance.add(dis);
                        trainLable.add(new DoubleWritable(trainSet.get(i).getLable()));
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            lables.setList(trainLable);
            context.write(textIndex, lables);
        }

        /**
         * Return the index of the maximum element of the list.
         */
        public int indexOfMax(ArrayList<Double> array) {
            int index = -1;
            double max = Double.NEGATIVE_INFINITY;
            for (int i = 0; i < array.size(); i++) {
                if (array.get(i) > max) {
                    max = array.get(i);
                    index = i;
                }
            }
            return index;
        }
    }

    public static class KNNReduce extends
            Reducer<LongWritable, ListWritable<DoubleWritable>, NullWritable, DoubleWritable> {

        @Override
        public void reduce(LongWritable index,
                Iterable<ListWritable<DoubleWritable>> kLables, Context context)
                throws IOException, InterruptedException {
            /**
             * Each key carries exactly one list, because a line offset is
             * unique to one test instance.
             */
            DoubleWritable predictedLable = new DoubleWritable();
            for (ListWritable<DoubleWritable> val : kLables) {
                try {
                    predictedLable = valueOfMostFrequent(val);
                    break;
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            context.write(NullWritable.get(), predictedLable);
        }

        /**
         * Return the most frequent value (the majority label) in the list.
         */
        public DoubleWritable valueOfMostFrequent(ListWritable<DoubleWritable> list)
                throws Exception {
            if (list.isEmpty())
                throw new Exception("list is empty!");

            // count how often each label occurs
            HashMap<DoubleWritable, Integer> tmp = new HashMap<DoubleWritable, Integer>();
            for (int i = 0; i < list.size(); i++) {
                if (tmp.containsKey(list.get(i))) {
                    tmp.put(list.get(i), tmp.get(list.get(i)) + 1);
                } else {
                    tmp.put(list.get(i), 1);
                }
            }

            // find the label with the maximum frequency
            DoubleWritable value = new DoubleWritable();
            int frequency = Integer.MIN_VALUE;
            Iterator<Entry<DoubleWritable, Integer>> iter = tmp.entrySet().iterator();
            while (iter.hasNext()) {
                Map.Entry<DoubleWritable, Integer> entry = iter.next();
                if (entry.getValue() > frequency) {
                    frequency = entry.getValue();
                    value = entry.getKey();
                }
            }
            return value;
        }
    }

    public static void main(String[] args) throws IOException,
            InterruptedException, ClassNotFoundException {
        Job kNNJob = new Job();
        kNNJob.setJobName("kNNJob");
        kNNJob.setJarByClass(KNearestNeighbour.class);

        // args[2]: URI of the training file, shipped to every node via the
        // distributed cache; args[3]: the value of k
        DistributedCache.addCacheFile(URI.create(args[2]),
                kNNJob.getConfiguration());
        kNNJob.getConfiguration().setInt("k", Integer.parseInt(args[3]));

        kNNJob.setMapperClass(KNNMap.class);
        kNNJob.setMapOutputKeyClass(LongWritable.class);
        kNNJob.setMapOutputValueClass(ListWritable.class);

        kNNJob.setReducerClass(KNNReduce.class);
        kNNJob.setOutputKeyClass(NullWritable.class);
        kNNJob.setOutputValueClass(DoubleWritable.class);

        kNNJob.setInputFormatClass(TextInputFormat.class);
        kNNJob.setOutputFormatClass(TextOutputFormat.class);
        FileInputFormat.addInputPath(kNNJob, new Path(args[0]));
        FileOutputFormat.setOutputPath(kNNJob, new Path(args[1]));

        kNNJob.waitForCompletion(true);
        System.out.println("finished!");
    }
}
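To tie the arguments together: args[0] is the predict/test input path, args[1] is the output directory, args[2] is the URI of the training file placed in the distributed cache, and args[3] is k. Assuming the classes above are packaged into a jar named knn.jar (the jar name and the HDFS paths below are only placeholders), the job could be launched roughly like this:

hadoop jar knn.jar knn.KNearestNeighbour /knn/predict /knn/output /knn/train/train.txt 3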

package knn;

public class Instance {

    private double[] attributeValue;
    private double lable;

    /**
     * Parse a line of the form "a1 a2 ... an lable" (space separated).
     *
     * @param line
     */
    public Instance(String line) {
        String[] value = line.split(" ");
        attributeValue = new double[value.length - 1];
        for (int i = 0; i < attributeValue.length; i++) {
            attributeValue[i] = Double.parseDouble(value[i]);
        }
        lable = Double.parseDouble(value[value.length - 1]);
    }

    public double[] getAtrributeValue() {
        return attributeValue;
    }

    public double getLable() {
        return lable;
    }
}
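A small illustrative check of the parser (again, not part of the original code):

package knn;

// Illustrative only: parse one training line and inspect the result.
public class InstanceDemo {
    public static void main(String[] args) {
        Instance inst = new Instance("1.0 2.0 3.0 1");
        System.out.println(inst.getAtrributeValue().length); // 3 attributes
        System.out.println(inst.getLable());                 // label 1.0
    }
}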

package knn;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.Writable;

public class ListWritable<T extends Writable> implements Writable {

    private List<T> list;
    private Class<T> clazz;

    public ListWritable() {
        list = null;
        clazz = null;
    }

    public ListWritable(Class<T> clazz) {
        this.clazz = clazz;
        list = new ArrayList<T>();
    }

    public void setList(List<T> list) {
        this.list = list;
    }

    public boolean isEmpty() {
        return list.isEmpty();
    }

    public int size() {
        return list.size();
    }

    public void add(T element) {
        list.add(element);
    }

    public void add(int index, T element) {
        list.add(index, element);
    }

    public T get(int index) {
        return list.get(index);
    }

    public T remove(int index) {
        return list.remove(index);
    }

    public void set(int index, T element) {
        list.set(index, element);
    }

    @Override
    public void write(DataOutput out) throws IOException {
        // write the element class name and count, then each element
        out.writeUTF(clazz.getName());
        out.writeInt(list.size());
        for (T element : list) {
            element.write(out);
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public void readFields(DataInput in) throws IOException {
        try {
            clazz = (Class<T>) Class.forName(in.readUTF());
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
        int count = in.readInt();
        this.list = new ArrayList<T>();
        for (int i = 0; i < count; i++) {
            try {
                T obj = clazz.newInstance();
                obj.readFields(in);
                list.add(obj);
            } catch (InstantiationException e) {
                e.printStackTrace();
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            }
        }
    }
}
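Because ListWritable is a custom Writable, a quick local round-trip through a byte stream is a handy way to confirm that write() and readFields() agree. This test class is only a sketch and not part of the original code:

package knn;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;

// Illustrative only: serialize a ListWritable and read it back.
public class ListWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        ListWritable<DoubleWritable> labels =
                new ListWritable<DoubleWritable>(DoubleWritable.class);
        labels.add(new DoubleWritable(1.0));
        labels.add(new DoubleWritable(2.0));

        // serialize to an in-memory buffer
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        labels.write(new DataOutputStream(bytes));

        // deserialize into a fresh instance
        ListWritable<DoubleWritable> copy = new ListWritable<DoubleWritable>();
        copy.readFields(new DataInputStream(
                new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.size() + " " + copy.get(0)); // expected: 2 1.0
    }
}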

Training set

1.0 2.0 3.0 1
1.0 2.1 3.1 1
0.9 2.2 2.9 1
3.4 6.7 8.9 2
3.0 7.0 8.7 2
3.3 6.9 8.8 2
2.5 3.3 10.0 3
2.4 2.9 8.0 3
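The predict (test) input uses the same space-separated layout, with -1 as the placeholder label; the rows below are made-up values for illustration only:

1.1 2.0 3.0 -1
3.2 6.8 8.8 -1
2.6 3.1 9.5 -1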