
Implementing common HDFS shell commands with the Java API (create a directory, upload a file, download a file, check whether a file exists)

2019-05-24 14:58
  • Create a directory
package hdfs.files;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class HDFSMKdir {
    public static void main(String[] args) throws IOException {
        // Run as the root user
        System.setProperty("HADOOP_USER_NAME", "root");
        // Create the HDFS connection object client
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://yy001:9000");
        FileSystem client = FileSystem.get(conf);
        // Create aadir under the HDFS root directory
        client.mkdirs(new Path("/aadir"));
        // Close the connection
        client.close();
        // Print a success message
        System.out.println("successful!");
    }
}
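Note that FileSystem.mkdirs() returns a boolean, so the success message can be tied to the actual result instead of being printed unconditionally. A minimal variant of the same call:

        boolean created = client.mkdirs(new Path("/aadir"));
        // true on success (including when the directory already exists)
        System.out.println(created ? "successful!" : "mkdirs failed");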
  • Upload a file
package hdfs.files;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class HDFSUpload {
    // Input stream (local file) and output stream (HDFS file)
    private static InputStream input;
    private static OutputStream output;

    public static void main(String[] args) throws IOException {
        // Run as the root user
        System.setProperty("HADOOP_USER_NAME", "root");
        // Create the HDFS connection object client
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://yy001:9000");
        FileSystem client = FileSystem.get(conf);
        // Open an input stream on the local file
        input = new FileInputStream("D:\\aa.txt");
        // Create an output stream to the HDFS file
        output = client.create(new Path("/a.txt"));
        // Copy the local file to HDFS, 1 KB at a time
        byte[] buffer = new byte[1024];
        int len = 0;
        while ((len = input.read(buffer)) != -1) {
            output.write(buffer, 0, len);
        }
        // Flush so no buffered data is lost
        output.flush();
        // A utility class such as IOUtils can replace the loop above (see the sketch below)
        // IOUtils.copy(input, output);
        // Close both streams
        input.close();
        output.close();
        System.out.println("Upload successful!");
    }
}
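The commented-out IOUtils line hints at a shorter alternative: Hadoop ships its own org.apache.hadoop.io.IOUtils, whose copyBytes() replaces the manual buffer loop. A minimal sketch, assuming the same client, local path, and HDFS path as above:

import org.apache.hadoop.io.IOUtils;

        InputStream in = new FileInputStream("D:\\aa.txt");
        OutputStream out = client.create(new Path("/a.txt"));
        // Copy with a 4 KB buffer; the final 'true' closes both streams when done
        IOUtils.copyBytes(in, out, 4096, true);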
  • Download a file
package hdfs.files;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.*;

public class HDFSDownload {
    // Input stream (HDFS file) and output stream (local file)
    private static InputStream input;
    private static OutputStream output;

    public static void main(String[] args) throws IOException {
        // Run as the root user
        System.setProperty("HADOOP_USER_NAME", "root");
        // Create the HDFS connection object client
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://yy001:9000");
        FileSystem client = FileSystem.get(conf);
        // Open an output stream to the local file
        output = new FileOutputStream("d:\\cccout.txt");
        // Open an input stream on the HDFS file
        input = client.open(new Path("/aadir/aaout.txt"));
        // Copy from HDFS to the local file, 1 KB at a time
        byte[] buffer = new byte[1024];
        int len = 0;
        while ((len = input.read(buffer)) != -1) {
            output.write(buffer, 0, len);
        }
        // Flush so no buffered data is lost
        output.flush();
        // A utility class such as IOUtils can replace the loop above
        // IOUtils.copy(input, output);
        // Close both streams
        input.close();
        output.close();
        System.out.println("Download successful!");
    }
}
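For plain uploads and downloads, FileSystem also provides one-call helpers that mirror hdfs dfs -put and hdfs dfs -get, avoiding explicit streams entirely. A minimal sketch using the same paths as the examples above (note that copyToLocalFile typically writes a .crc checksum file next to the local copy):

        // Upload: equivalent of 'hdfs dfs -put D:\aa.txt /a.txt'
        client.copyFromLocalFile(new Path("D:\\aa.txt"), new Path("/a.txt"));
        // Download: equivalent of 'hdfs dfs -get /aadir/aaout.txt d:\cccout.txt'
        client.copyToLocalFile(new Path("/aadir/aaout.txt"), new Path("d:\\cccout.txt"));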
  • Check whether a file exists
package hdfs.files;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class HDFSFileIfExist {
    public static void main(String[] args) throws IOException {
        // Run as the root user
        System.setProperty("HADOOP_USER_NAME", "root");
        // Create the HDFS connection object client
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://yy001:9000");
        FileSystem client = FileSystem.get(conf);
        // Path of the file to check
        String fileName = "/aadir/aaout.txt";
        // Test whether the file exists
        if (client.exists(new Path(fileName))) {
            System.out.println("File exists!");
        } else {
            System.out.println("File does not exist!");
        }
        // Close the connection
        client.close();
    }
}
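Since FileSystem implements java.io.Closeable, any of the examples above can use try-with-resources so the connection is closed even if an exception is thrown. A minimal sketch of the existence check in that style:

        try (FileSystem fs = FileSystem.get(conf)) {
            // fs is closed automatically when this block exits
            String fileName = "/aadir/aaout.txt";
            System.out.println(fs.exists(new Path(fileName)) ? "File exists!" : "File does not exist!");
        }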