【Hadoop】HDFS客户端开发示例
2016-09-02 17:58
323 查看
1、原理、步骤
2、HDFS客户端示例代码
package com.ares.hadoop.hdfs;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URISyntaxException;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Smoke tests for basic HDFS client operations: download (get), upload (put),
 * directory creation, recursive delete, and listing.
 *
 * <p>Connection settings come from the default Hadoop {@link Configuration}
 * on the classpath (core-site.xml etc.); the commented-out lines in
 * {@link #setUp()} show how to target an explicit NameNode URI and user
 * instead. Each test therefore requires a reachable HDFS cluster — these are
 * integration tests, not unit tests.
 */
public class HDFSTest {

    private static final Logger LOGGER = Logger.getLogger(HDFSTest.class);

    /** Client handle to the target file system; opened per test in setUp(). */
    private FileSystem fs = null;

    /**
     * Opens the FileSystem client before each test.
     *
     * @throws IOException            if the file system cannot be reached
     * @throws URISyntaxException     kept for the explicit-URI variant below
     * @throws InterruptedException   kept for the user-impersonation variant below
     */
    @Before
    public void setUp() throws IOException, URISyntaxException, InterruptedException {
        Configuration conf = new Configuration();
        // Explicit-NameNode / explicit-user alternative:
        // conf.set("fs.defaultFS", "hdfs://HADOOP-NODE1:9000");
        // URI uri = new URI("hdfs://HADOOP-NODE1:9000");
        // this.fs = FileSystem.get(uri, conf, "HADOOP");
        this.fs = FileSystem.get(conf);
    }

    /** Releases the client connection after each test (tolerates a failed setUp). */
    @After
    public void tearDown() throws IOException {
        if (this.fs != null) {
            this.fs.close();
        }
    }

    /** Downloads a file from HDFS to the local disk via raw stream copy. */
    @Test
    public void testGET() throws IOException {
        LOGGER.debug("HDFSTest: GET FILE TEST");
        Path path = new Path("hdfs://HADOOP-NODE1:9000/jdk-7u60-linux-x64.tar.gz");
        // try-with-resources: both streams are closed even if the copy fails,
        // preventing descriptor leaks.
        try (FSDataInputStream in = this.fs.open(path);
             FileOutputStream out = new FileOutputStream("./testdata/get-test-jdk.tar.gz")) {
            IOUtils.copy(in, out);
        }
    }

    /** Uploads a local file to HDFS via raw stream copy. */
    @Test
    public void testPUT() throws IOException {
        LOGGER.debug("HDFSTest: PUT FILE TEST");
        Path path = new Path("hdfs://HADOOP-NODE1:9000/put-test-jdk.tar.gz");
        // Closing the FSDataOutputStream is essential: an unclosed HDFS output
        // stream may never flush its final block, leaving a truncated file.
        try (FSDataOutputStream out = this.fs.create(path);
             FileInputStream in = new FileInputStream("./testdata/test-jdk.tar.gz")) {
            IOUtils.copy(in, out);
        }
    }

    /** Downloads a file using the higher-level copyToLocalFile API. */
    @Test
    public void testGET_NEW() throws IOException {
        LOGGER.debug("HDFSTest: GET_NEW FILE TEST");
        Path src = new Path("hdfs://HADOOP-NODE1:9000/jdk-7u60-linux-x64.tar.gz");
        Path dst = new Path("./testdata/get-test-new-jdk.tar.gz");
        this.fs.copyToLocalFile(src, dst);
    }

    /** Uploads a file using the higher-level copyFromLocalFile API. */
    @Test
    public void testPUT_NEW() throws IOException {
        LOGGER.debug("HDFSTest: PUT_NEW FILE TEST");
        Path src = new Path("./testdata/test-jdk.tar.gz");
        Path dst = new Path("hdfs://HADOOP-NODE1:9000/put-test-new-jdk.tar.gz");
        this.fs.copyFromLocalFile(src, dst);
    }

    /** Creates a nested directory tree (mkdirs creates missing parents). */
    @Test
    public void testMKDIR() throws IOException {
        LOGGER.debug("HDFSTest: MKDIR TEST");
        Path f = new Path("/mkdir-test/testa/testb");
        this.fs.mkdirs(f);
    }

    /** Recursively deletes the directory tree created by testMKDIR. */
    @Test
    public void testRM() throws IOException {
        LOGGER.debug("HDFSTest: RM TEST");
        Path f = new Path("/mkdir-test");
        // second argument 'true' => recursive delete
        this.fs.delete(f, true);
    }

    /** Lists files (recursively) and then immediate children of the root. */
    @Test
    public void testLIST() throws IOException {
        LOGGER.debug("HDFSTest: LIST TEST");
        Path f = new Path("/");

        // listFiles returns only files, walking the tree recursively.
        RemoteIterator<LocatedFileStatus> files = this.fs.listFiles(f, true);
        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            LOGGER.debug(file.getPath());
            LOGGER.debug(file.getPath().getName());
        }

        // listStatus returns direct children (files AND directories), non-recursive.
        FileStatus[] children = this.fs.listStatus(f);
        for (FileStatus fileStatus : children) {
            LOGGER.debug(fileStatus.getPath().getName());
            LOGGER.debug(fileStatus.isDirectory());
        }
    }
}
相关文章推荐
- Hadoop2.4.1 HDFS的Java客户端开发
- hadoop开发库webhdfs使用介绍
- 基于Hadoop的云盘系统客户端技术难点之二 HDFS文件系统安全保障
- thrift服务端到客户端开发简单示例
- 编译hadoop的eclipse插件hadoop-eclipse-plugin-1.2.1.jar 【用来管理hadoop的HDFS和开发MapReduce项目】
- HDFS客户端的权限错误:Permission denied 分类: hadoop 2015-03-22 07:22 59人阅读 评论(0) 收藏
- 通过GWT示例MAIL学习GWT的客户端开发一:整体布局
- iOS开发之多表视图滑动切换示例(仿"头条"客户端)---优化篇
- Hadoop学习笔记之HBase客户端程序开发
- XFire开发客户端几种方式的示例
- iOS开发之多表视图滑动切换示例(仿"头条"客户端)
- 基于eclipse的hadoop开发-----HDFS API学习
- 基于Hadoop开发网络云盘系统客户端界面设计初稿
- 基于Hadoop开发网络云盘系统客户端界面设计初稿
- Hadoop java api ,HDFS文件操作,便于Web开发的集合
- 基于Hadoop的云盘系统客户端技术难点之二 HDFS文件访问控制
- Hadoop学习笔记之四:HDFS客户端
- Hadoop-2.2.0二次开发报错:No FileSystem for scheme: hdfs
- 第三章 Hadoop2.x应用开发step by step——HDFS
- 利用IDEA工具开发Hadoop2.6,win7下远程连接到hdfs。