您的位置:首页 > 大数据 > Hadoop

HDFS追加文件

2014-04-20 12:46 183 查看
配置:hdfs-site.xml
<property>
<name>dfs.support.append</name>
<value>true</value>
</property>
追加一个文件

package com.wyp;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.*;
import java.net.URI;

/**
 * Appends the contents of a local file to an existing file on HDFS.
 *
 * <p>Requires append support to be enabled cluster-side in hdfs-site.xml:
 * {@code <property><name>dfs.support.append</name><value>true</value></property>}
 *
 * blog: http://www.iteblog.com/
 * Date: 14-1-2, Time: 6:09 PM
 */
public class AppendContent {
    public static void main(String[] args) {
        // Target HDFS file that receives the appended bytes.
        String hdfsPath = "hdfs://mycluster/home/wyp/wyp.txt";
        Configuration conf = new Configuration();
        // Client-side flag; the NameNode must also have append enabled.
        conf.setBoolean("dfs.support.append", true);

        // Local file whose contents will be appended to the HDFS file.
        String inPath = "/home/wyp/append.txt";
        // try-with-resources: the original leaked the input stream if
        // fs.append() threw, and never closed the FileSystem handle.
        try (FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
             InputStream in = new BufferedInputStream(new FileInputStream(inPath))) {
            OutputStream out = fs.append(new Path(hdfsPath));
            // copyBytes(..., true) closes 'out' (and 'in') on completion or error.
            IOUtils.copyBytes(in, out, 4096, true);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}


追加一行内容

/**
 * Appends one line of text to the end of an existing file on HDFS.
 *
 * <p>Note: append must be enabled in hdfs-site.xml (the original comment had
 * the property name reversed — the correct key is {@code dfs.support.append}):
 * {@code <property><name>dfs.support.append</name><value>true</value></property>}
 *
 * @throws FileNotFoundException if the target file does not exist
 * @throws IOException           on any HDFS I/O failure
 */
private static void appendToHdfs() throws FileNotFoundException,
        IOException {
    String dst = "hdfs://192.168.1.11:9000/usr/yujing/test.txt";
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(dst), conf);
    try {
        FSDataOutputStream out = fs.append(new Path(dst));
        try {
            byte[] payload = "zhangzk add by hdfs java api".getBytes();
            // BUG FIX: the original looped `while (-1 != readLen)` but never
            // updated readLen, so it appended the same bytes forever. The
            // payload is written exactly once.
            out.write(payload, 0, payload.length);
        } finally {
            // Close in a finally block so a failed write still releases the lease.
            out.close();
        }
    } finally {
        fs.close();
    }
}
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: