HDFS上的常用文件操作java code实现
2011-05-23 22:03
459 查看
首先说明下我的Hadoop是安装在单机上的,分布式环境下的文件操作应该类似。
以下代码中我实现了在HDFS上新建文件、追加内容到文件、删除文件、判断文件是否存在、显示文件内容和将文件的内容读到字符串中。我用的是hadoop-0.20.2,jdk-1.6。需要说明的是,以append方式将内容添加到HDFS上文件的末尾,需要在hdfs-site.xml中添加"<property><name>dfs.support.append</name><value>true</value></property>"。
package hdfs;
import java.io.*;
import java.net.URI;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.fs.*;
public class FileOperation {
    /**
     * Common HDFS file operations: create, append, upload, existence check,
     * delete, and read into a String. Written against hadoop-0.20.2 / JDK 1.6,
     * so streams are released with try/finally rather than try-with-resources.
     *
     * @author zghe
     */

    // create a new file on HDFS and write a fixed sample string into it
    private static void createHdfsFile(String uri) throws IOException {
        String src = "hello hdfs";
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        OutputStream out = fs.create(path);
        try {
            // explicit charset: bare getBytes() uses the platform-default encoding
            out.write(src.getBytes("UTF-8"));
            out.flush();
        } finally {
            out.close(); // release the stream even if write() fails
        }
    }

    // append content to an existing file on HDFS
    // (requires dfs.support.append=true in hdfs-site.xml on hadoop-0.20.x)
    private static void appendHdfsFile(String uri) throws IOException {
        String src = " append hdfs";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        OutputStream out = fs.append(new Path(uri));
        try {
            out.write(src.getBytes("UTF-8"));
            out.flush();
        } finally {
            out.close();
        }
    }

    // upload the local file "/hello_hadoop" to the given HDFS uri,
    // printing a '.' on every progress callback
    private static void uploadToHdfs(String uri) throws FileNotFoundException, IOException {
        String localSrc = "/hello_hadoop";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
        OutputStream out;
        try {
            out = fs.create(new Path(uri), new Progressable() {
                public void progress() {
                    System.out.print(".");
                }
            });
        } catch (IOException e) {
            in.close(); // don't leak the local stream if create() fails
            throw e;
        }
        // close=true: copyBytes closes both streams, on success or failure
        IOUtils.copyBytes(in, out, 4096, true);
    }

    // check whether the file exists on HDFS
    private static boolean isHdfsFileExist(String uri) throws IOException {
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        return fs.exists(path); // no need to compare a boolean to true
    }

    // delete the file on HDFS immediately
    private static void delHdfsFile(String uri) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        // delete() removes the path right away; the original deleteOnExit()
        // only scheduled removal for when the FileSystem was closed. Also,
        // FileSystem.get() returns a JVM-wide cached instance, so we no
        // longer close it here (closing would break other users of the cache).
        fs.delete(new Path(uri), false); // false: no recursion, the path is a file
    }

    // print the content of the HDFS file to stdout
    public static void catHdfsFile(String uri) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = null;
        try {
            in = fs.open(new Path(uri));
            IOUtils.copyBytes(in, System.out, 4096, false); // false: keep System.out open
        } finally {
            IOUtils.closeStream(in);
        }
    }

    // read the whole HDFS file into a String (line terminators dropped),
    // echo it to stdout, and return it
    public static String getContent(String uri) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = null;
        StringBuilder sb = new StringBuilder(); // O(n) instead of repeated String.concat
        try {
            in = fs.open(new Path(uri));
            BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
            String line;
            while ((line = br.readLine()) != null) {
                sb.append(line);
            }
            String s = sb.toString();
            System.out.println(s);
            return s;
        } finally {
            IOUtils.closeStream(in); // also releases the wrapping reader's source
        }
    }

    public static void main(String[] args) throws IOException {
        String uri = "hdfs://localhost:54310/usr/root/hello_hdfs";
        FileOperation.createHdfsFile(uri);   // create file 'hello_hdfs' on hdfs
        FileOperation.catHdfsFile(uri);      // content is 'hello hdfs'
        System.out.println("");
        FileOperation.uploadToHdfs(uri);     // upload local file '/hello_hadoop' over it
        FileOperation.catHdfsFile(uri);      // content is now that of '/hello_hadoop'
        FileOperation.appendHdfsFile(uri);   // append ' append hdfs' to the file
        FileOperation.getContent(uri);       // prints 'Hello Hadoop append hdfs'
        FileOperation.delHdfsFile(uri);      // delete the file on hdfs
        if (!FileOperation.isHdfsFileExist(uri)) {
            System.out.println("file has been deleted");
        }
    }
}
测试结果是:
hello hdfs
Hello Hadoop
Hello Hadoop append hdfs
file has been deleted
以下代码中我实现了在HDFS上新建文件、追加内容到文件、删除文件、判断文件是否存在、显示文件内容和将文件的内容读到字符串中。我用的是hadoop-0.20.2,jdk-1.6。需要说明的是,以append方式将内容添加到HDFS上文件的末尾,需要在hdfs-site.xml中添加"<property><name>dfs.support.append</name><value>true</value></property>"。
package hdfs;
import java.io.*;
import java.net.URI;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.fs.*;
public class FileOperation {
    /**
     * Common HDFS file operations: create, append, upload, existence check,
     * delete, and read into a String. Written against hadoop-0.20.2 / JDK 1.6,
     * so streams are released with try/finally rather than try-with-resources.
     *
     * @author zghe
     */

    // create a new file on HDFS and write a fixed sample string into it
    private static void createHdfsFile(String uri) throws IOException {
        String src = "hello hdfs";
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        OutputStream out = fs.create(path);
        try {
            // explicit charset: bare getBytes() uses the platform-default encoding
            out.write(src.getBytes("UTF-8"));
            out.flush();
        } finally {
            out.close(); // release the stream even if write() fails
        }
    }

    // append content to an existing file on HDFS
    // (requires dfs.support.append=true in hdfs-site.xml on hadoop-0.20.x)
    private static void appendHdfsFile(String uri) throws IOException {
        String src = " append hdfs";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        OutputStream out = fs.append(new Path(uri));
        try {
            out.write(src.getBytes("UTF-8"));
            out.flush();
        } finally {
            out.close();
        }
    }

    // upload the local file "/hello_hadoop" to the given HDFS uri,
    // printing a '.' on every progress callback
    private static void uploadToHdfs(String uri) throws FileNotFoundException, IOException {
        String localSrc = "/hello_hadoop";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
        OutputStream out;
        try {
            out = fs.create(new Path(uri), new Progressable() {
                public void progress() {
                    System.out.print(".");
                }
            });
        } catch (IOException e) {
            in.close(); // don't leak the local stream if create() fails
            throw e;
        }
        // close=true: copyBytes closes both streams, on success or failure
        IOUtils.copyBytes(in, out, 4096, true);
    }

    // check whether the file exists on HDFS
    private static boolean isHdfsFileExist(String uri) throws IOException {
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        return fs.exists(path); // no need to compare a boolean to true
    }

    // delete the file on HDFS immediately
    private static void delHdfsFile(String uri) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        // delete() removes the path right away; the original deleteOnExit()
        // only scheduled removal for when the FileSystem was closed. Also,
        // FileSystem.get() returns a JVM-wide cached instance, so we no
        // longer close it here (closing would break other users of the cache).
        fs.delete(new Path(uri), false); // false: no recursion, the path is a file
    }

    // print the content of the HDFS file to stdout
    public static void catHdfsFile(String uri) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = null;
        try {
            in = fs.open(new Path(uri));
            IOUtils.copyBytes(in, System.out, 4096, false); // false: keep System.out open
        } finally {
            IOUtils.closeStream(in);
        }
    }

    // read the whole HDFS file into a String (line terminators dropped),
    // echo it to stdout, and return it
    public static String getContent(String uri) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = null;
        StringBuilder sb = new StringBuilder(); // O(n) instead of repeated String.concat
        try {
            in = fs.open(new Path(uri));
            BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
            String line;
            while ((line = br.readLine()) != null) {
                sb.append(line);
            }
            String s = sb.toString();
            System.out.println(s);
            return s;
        } finally {
            IOUtils.closeStream(in); // also releases the wrapping reader's source
        }
    }

    public static void main(String[] args) throws IOException {
        String uri = "hdfs://localhost:54310/usr/root/hello_hdfs";
        FileOperation.createHdfsFile(uri);   // create file 'hello_hdfs' on hdfs
        FileOperation.catHdfsFile(uri);      // content is 'hello hdfs'
        System.out.println("");
        FileOperation.uploadToHdfs(uri);     // upload local file '/hello_hadoop' over it
        FileOperation.catHdfsFile(uri);      // content is now that of '/hello_hadoop'
        FileOperation.appendHdfsFile(uri);   // append ' append hdfs' to the file
        FileOperation.getContent(uri);       // prints 'Hello Hadoop append hdfs'
        FileOperation.delHdfsFile(uri);      // delete the file on hdfs
        if (!FileOperation.isHdfsFileExist(uri)) {
            System.out.println("file has been deleted");
        }
    }
}
测试结果是:
hello hdfs
Hello Hadoop
Hello Hadoop append hdfs
file has been deleted
相关文章推荐
- Java实现远程HDFS的文件操作(新建、上传、下载、删除)
- JAVA API 实现hdfs文件操作
- Hadoop HDFS文件操作 Java实现类
- java实现对hdfs文件系统的上传,下载,删除,创建文件夹的操作演示
- FS Shell命令与JAVA实现操作HDFS文件
- java操作hdfs,实现文件上传、下载以及查看当前文件夹下所有文件
- 操作文件处理常用方法(Java实现)
- java操作hadoop hdfs,实现文件上传下载demo
- Java实现HDFS文件操作工具类
- JAVA实现:将文件从本地上传到HDFS上、从HDFS上读取等操作
- HDFS的文件操作(Java代码实现)
- Java实现远程HDFS的文件操作(新建、上传、下载、删除)
- Hadoop HDFS文件操作 Java实现类
- thrift实现HDFS文件操作
- mongoDB——java操作mongoDB实现文件上传下载
- Hadoop系列-HDFS文件操作的JAVA API用法(七)
- hadoop HDFS常用文件操作命令 (转)
- java 实现hadoop的hdfs文件的上传下载删除创建
- java 操作 HDFS上文件
- java操作FTP,实现文件上传下载删除操作