您的位置:首页 > 编程语言 > Java开发

hdfs java API

2016-08-09 14:00 351 查看
package com.hadoop.hdfs;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.io.IOUtils;

/**
 * Practice harness for the HDFS Java API: directory creation, file
 * creation/upload, rename, delete, block-location lookup and datanode
 * enumeration against a remote cluster.
 *
 * NOTE(review): all operations target the hard-coded demo cluster at
 * 118.192.153.114:8020 and d:\ local paths — adjust before real use.
 */
public class HdfsPractise {

    /** NameNode URI; was duplicated verbatim in every method. */
    private static final String DEFAULT_FS = "hdfs://118.192.153.114:8020";

    public static void main(String[] args) throws IOException {
        // Uncomment individual calls to exercise them against the cluster.
        //makeDir();
        //touchFile();
        //uploadFromLocal();
        //reName();
        //delFile();
        //searchBlock();
        //getNodeInfo();
    }

    /**
     * Builds a FileSystem bound to the demo cluster. Extracted to remove the
     * Configuration boilerplate repeated in every method.
     *
     * <p>Callers must close the returned instance (the original methods never
     * did, leaking connections). Note FileSystem.get may return a cached,
     * shared instance; for a single-shot practice class that is acceptable.
     */
    private static FileSystem getFileSystem() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", DEFAULT_FS);
        return FileSystem.get(conf);
    }

    /** Creates /test4 if absent; fails loudly instead of ignoring mkdirs' result. */
    private static void makeDir() throws IOException {
        try (FileSystem fs = getFileSystem()) {
            Path dir = new Path("/test4");
            if (!fs.exists(dir) && !fs.mkdirs(dir)) {
                throw new IOException("mkdirs failed for " + dir);
            }
        }
    }

    /**
     * Copies a local file into /test2/sample_age_data.txt.
     *
     * <p>The local input stream is opened BEFORE the HDFS output stream: the
     * original did the reverse, so a missing local file leaked a half-created
     * HDFS file and its open output stream.
     */
    private static void touchFile() throws IOException {
        try (FileSystem fs = getFileSystem();
             FileInputStream in = new FileInputStream("d:\\sample_age_data.txt");
             FSDataOutputStream out = fs.create(new Path("/test2/sample_age_data.txt"))) {
            // close=false: try-with-resources already closes both streams.
            IOUtils.copyBytes(in, out, 1024, false);
        }
    }

    /** Uploads the local sample file into the /test2 directory. */
    private static void uploadFromLocal() throws IOException {
        try (FileSystem fs = getFileSystem()) {
            fs.copyFromLocalFile(new Path("d:\\sample_age_data.txt"), new Path("/test2"));
        }
    }

    /** Renames /test2/sample_age_data.txt to /test2/fff; checks the boolean result. */
    private static void reName() throws IOException {
        try (FileSystem fs = getFileSystem()) {
            Path src = new Path("/test2/sample_age_data.txt");
            Path dst = new Path("/test2/fff");
            if (!fs.rename(src, dst)) {
                throw new IOException("rename failed: " + src + " -> " + dst);
            }
        }
    }

    /** Recursively deletes /test2; checks the boolean result the original ignored. */
    private static void delFile() throws IOException {
        try (FileSystem fs = getFileSystem()) {
            Path target = new Path("/test2");
            // delete returns false both on failure and when the path is
            // already gone; only treat it as an error if the path persists.
            if (!fs.delete(target, true) && fs.exists(target)) {
                throw new IOException("delete failed for " + target);
            }
        }
    }

    /** Prints the first replica host of each block of /test1/aaa.txt. */
    private static void searchBlock() throws IOException {
        try (FileSystem fs = getFileSystem()) {
            FileStatus fst = fs.getFileStatus(new Path("/test1/aaa.txt"));
            BlockLocation[] bls = fs.getFileBlockLocations(fst, 0, fst.getLen());
            for (int i = 0; i < bls.length; i++) {
                String[] hosts = bls[i].getHosts();
                // Guard: a block with no live replicas has an empty host array;
                // the original's hosts[0] threw ArrayIndexOutOfBoundsException.
                System.out.println("block_" + i + "_location:  "
                        + (hosts.length > 0 ? hosts[0] : "<no live replica>"));
            }
        }
    }

    /** Prints the hostname of each datanode reported by the NameNode. */
    private static void getNodeInfo() throws IOException {
        try (FileSystem fs = getFileSystem()) {
            // Guard the downcast the original performed blindly (CCE risk when
            // fs.defaultFS points at a non-HDFS filesystem).
            if (!(fs instanceof DistributedFileSystem)) {
                throw new IOException("Not a DistributedFileSystem: " + fs.getClass().getName());
            }
            DatanodeInfo[] dns = ((DistributedFileSystem) fs).getDataNodeStats();
            for (int i = 0; i < dns.length; i++) {
                System.out.println("datanode_" + i + "_name:  " + dns[i].getHostName());
            }
        }
    }

}


  
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: