
Common File Operations on HDFS Implemented in Java

2011-05-23 22:03
First, a quick note: my Hadoop installation runs on a single machine; file operations in a fully distributed environment should work much the same way.

The code below implements creating a new file on HDFS, appending content to a file, uploading a local file, deleting a file, checking whether a file exists, printing a file's content, and reading a file's content into a string. I am using hadoop-0.20.2 and JDK 1.6. Note that appending content to the end of an existing file on HDFS requires adding the property dfs.support.append = true to hdfs-site.xml, as shown in the snippet below.
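For reference, this is how the property looks when placed in hdfs-site.xml (a minimal sketch; the surrounding <configuration> element is the standard root of hdfs-site.xml and is assumed to already exist in your file):

<configuration>
  <property>
    <name>dfs.support.append</name>
    <value>true</value>
  </property>
</configuration>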

package hdfs;
import java.io.*;
import java.net.URI;

import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.fs.*;
public class FileOperation {

    /**
     * @author zghe
     */

    // create a new file on HDFS and write a short string into it
    private static void createHdfsFile(String uri) throws IOException {
        String src = "hello hdfs";
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        // fs.create() overwrites the file if it already exists
        OutputStream out = fs.create(path);
        out.write(src.getBytes());
        out.flush();
        out.close();
    }
    // append content to an existing file on HDFS
    // (requires dfs.support.append=true in hdfs-site.xml, see above)
    private static void appendHdfsFile(String uri) throws IOException {
        String src = " append hdfs";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        OutputStream out = fs.append(new Path(uri));
        out.write(src.getBytes());
        out.flush();
        out.close();
    }
    // upload a local file to HDFS, printing a dot for each progress callback
    private static void uploadToHdfs(String uri) throws FileNotFoundException, IOException {
        String localSrc = "/hello_hadoop";   // path of the local file to upload
        InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        OutputStream out = fs.create(new Path(uri), new Progressable() {
            public void progress() {
                System.out.print(".");
            }
        });
        // copyBytes with close=true closes both streams when the copy finishes
        IOUtils.copyBytes(in, out, 4096, true);
    }
    // check whether a file exists on HDFS
    private static boolean isHdfsFileExist(String uri) throws IOException {
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        return fs.exists(path);
    }
    // delete the file on HDFS
    private static void delHdfsFile(String uri) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        // deleteOnExit() only marks the path for deletion; the actual delete
        // happens when the FileSystem is closed, which is done right below
        fs.deleteOnExit(new Path(uri));
        fs.close();
    }
    // print the content of a file on HDFS to standard output
    public static void catHdfsFile(String uri) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = null;
        try {
            in = fs.open(new Path(uri));
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
    // read the content of a file on HDFS into a String
    public static String getContent(String uri) throws IOException {
        StringBuilder s = new StringBuilder();
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = null;
        try {
            in = fs.open(new Path(uri));
            BufferedReader br = new BufferedReader(new InputStreamReader(in));
            String tem;
            // readLine() strips line terminators, so lines are joined without newlines
            while ((tem = br.readLine()) != null) {
                s.append(tem);
            }
            System.out.println(s);
        } finally {
            IOUtils.closeStream(in);
        }
        return s.toString();
    }

    public static void main(String[] args) throws IOException {
        String uri = "hdfs://localhost:54310/usr/root/hello_hdfs";
        FileOperation.createHdfsFile(uri);   // create the file 'hello_hdfs' on HDFS
        FileOperation.catHdfsFile(uri);      // prints 'hello hdfs'
        System.out.println("");
        FileOperation.uploadToHdfs(uri);     // upload the local file '/hello_hadoop', overwriting 'hello_hdfs'
        FileOperation.catHdfsFile(uri);      // prints the content of 'hello_hadoop': 'Hello Hadoop'
        FileOperation.appendHdfsFile(uri);   // append ' append hdfs' after 'Hello Hadoop'
        FileOperation.getContent(uri);       // prints 'Hello Hadoop append hdfs'
        FileOperation.delHdfsFile(uri);      // delete the file on HDFS
        if (!FileOperation.isHdfsFileExist(uri))
            System.out.println("file has been deleted");
    }

}

The test output is:

hello hdfs

Hello Hadoop

Hello Hadoop append hdfs

file has been deleted