
Basic HDFS API Operations

2015-06-03 22:01
Basic HDFS API operations are all performed through the
org.apache.hadoop.fs.FileSystem
class. Below are some common operations:

package HdfsAPI;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import junit.framework.TestCase;

public class HDFSAPI extends TestCase{  
    public static String hdfsUrl = "hdfs://master:8020";
    //Create a directory
    @Test
    public void testHDFSmkdir() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        Path path = new Path("/liguodong/hdfs");
        fs.mkdirs(path);
    }

    //Create a file and write a string to it
    @Test
    public void testHDFSmkFile() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        Path path = new Path("/liguodong/hdfs/liguodong.txt");
        FSDataOutputStream fdos = fs.create(path);
        fdos.write("hello hadoop".getBytes());
        //close the stream so the data is actually flushed to HDFS
        fdos.close();
    }

    //Rename a file
    @Test
    public void testHDFSRenameFile() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        Path path = new Path("/liguodong/hdfs/liguodong.txt");
        Path renamePath = new Path("/liguodong/hdfs/love.txt");
        //rename() returns true on success
        System.out.println(fs.rename(path, renamePath));
    }
    //Upload a local file with copyFromLocalFile
    @Test
    public void testHDFSPutLocalFile1() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        //Debugging on Windows, so a Windows path is used here;
        //on Linux, use a Linux path instead.
        Path srcpath = new Path("g:/liguodong.txt");
        Path destpath = new Path("/liguodong/hdfs");
        fs.copyFromLocalFile(srcpath, destpath);
    }

    //Upload a local file by streaming it through an FSDataOutputStream
    @Test
    public void testHDFSPutLocalFile2() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        //Debugging on Windows, so a Windows path is used here;
        //on Linux, use a Linux path instead.
        String srcpath ="g:/oncemore.txt";
        Path destpath = new Path("/liguodong/hdfs/kkk.txt");
        InputStream is = new BufferedInputStream(
                new FileInputStream(new File(srcpath)));
        FSDataOutputStream fdos = fs.create(destpath);
        //4096-byte buffer; the final 'true' closes both streams on completion
        IOUtils.copyBytes(is, fdos, 4096, true);
    }
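
    //Download a file from HDFS to the local filesystem. A minimal sketch:
    //the local destination path below is only an assumed example, mirroring
    //the upload tests above.
    @Test
    public void testHDFSGetLocalFile() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        Path srcpath = new Path("/liguodong/hdfs/kkk.txt");
        Path destpath = new Path("g:/kkk.txt");
        fs.copyToLocalFile(srcpath, destpath);
    }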

    //List all files under a directory
    @Test
    public void testHDFSListFiles() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        Path path = new Path("/liguodong/hdfs");
        FileStatus[] files = fs.listStatus(path);

        for (FileStatus file : files) {
            System.out.println(file.getPath().toString());
        }
    }
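
    //Read a file's contents back and print them to stdout. A minimal
    //sketch: the path is assumed to exist (it is created by the rename
    //test above).
    @Test
    public void testHDFSReadFile() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        Path path = new Path("/liguodong/hdfs/love.txt");
        InputStream is = fs.open(path);
        //the final 'true' closes the stream when the copy finishes
        IOUtils.copyBytes(is, System.out, 4096, true);
    }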

    //Get the block location info of a file
    @Test
    public void testHDFSGetBlockInfo() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        Path path = new Path("/liguodong/hdfs/kkk.txt");
        FileStatus filestatus = fs.getFileStatus(path);
        BlockLocation[] blkLoc = fs.getFileBlockLocations(
                filestatus, 0, filestatus.getLen());
        for (BlockLocation loc : blkLoc) {
            for (int i = 0; i < loc.getHosts().length; i++) {
                //print the hostnames of the DataNodes holding each block;
                //this file has only one block, so the output is:
                //slave2, slave1, slave5
                System.out.println(loc.getHosts()[i]);
            }
        }
    }
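
    //Delete a path. A minimal sketch: the second argument enables
    //recursive deletion, so use it with care on directories.
    @Test
    public void testHDFSDeleteFile() throws IOException{
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsUrl),conf);
        Path path = new Path("/liguodong/hdfs/kkk.txt");
        //delete() returns true on success
        System.out.println(fs.delete(path, true));
    }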
}
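
Note: all of the tests above assume a running HDFS cluster whose NameNode is reachable at master:8020 (adjust hdfsUrl for your own cluster) and the Hadoop client libraries (hadoop-common and hadoop-hdfs, or the hadoop-client umbrella artifact) on the classpath, along with JUnit.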