学习hadoop的hdfs开发(java)
2017-11-13 14:08
357 查看
测试代码
package net.xxx;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
public class test {
/**
 * Writes the string "hello" to the given HDFS path, acting as user "hdfs".
 *
 * @param hdfs full HDFS URI of the target file, e.g. hdfs://host:8020/test/hello.txt
 * @throws IOException declared for API compatibility; in practice all errors
 *         are caught below and printed (the original swallowed them the same way)
 */
public void WriteFile(String hdfs) throws IOException {
Configuration conf = new Configuration();
// try-with-resources guarantees the output stream and the FileSystem are
// closed even when an exception is thrown mid-write; the original only
// closed them on the success path and leaked both on failure.
try (FileSystem fs = FileSystem.get(URI.create(hdfs), conf, "hdfs");
     FSDataOutputStream hdfsOutStream = fs.create(new Path(hdfs))) {
// writeBytes emits one byte per character, which is correct for this ASCII
// payload. The original writeChars wrote UTF-16 (two bytes per char), so
// ReadFile's raw byte dump printed NUL-interleaved garbage.
hdfsOutStream.writeBytes("hello");
} catch (Exception e) {
// Best-effort demo: report and continue (preserves original behavior).
System.out.println(e);
}
}
/**
 * Reads the file at the given HDFS path and dumps its raw bytes to stdout,
 * followed by a newline. Acts as user "hdfs".
 *
 * @param hdfs full HDFS URI of the file to read
 * @throws IOException declared for API compatibility; errors are caught and printed
 */
public void ReadFile(String hdfs) throws IOException {
Configuration conf = new Configuration();
// try-with-resources closes the input stream and FileSystem on every path,
// fixing the leak the original had when read() threw.
try (FileSystem fs = FileSystem.get(URI.create(hdfs), conf, "hdfs");
     FSDataInputStream hdfsInStream = fs.open(new Path(hdfs))) {
byte[] ioBuffer = new byte[1024];
int readLen;
// Copy the file to stdout 1 KiB at a time until EOF (read returns -1).
while ((readLen = hdfsInStream.read(ioBuffer)) != -1) {
System.out.write(ioBuffer, 0, readLen);
}
System.out.println();
} catch (Exception e) {
System.out.println(e);
}
}
/**
 * Demo entry point: writes "hello" to a fixed HDFS path, then reads it back.
 */
public static void main(String[] args) throws IOException {
String hdfs = "hdfs://test127:8020/test/hello.txt";
test t = new test();
t.WriteFile(hdfs);
t.ReadFile(hdfs);
}
}
依赖库编译
cp "/opt/cm-5.12.1/share/cmf/common_jars/hadoop-common-2.5.0-cdh5.3.3.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/lib/commons-logging-1.1.1.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/guava-14.0.1.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/commons-collections-3.2.2.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/commons-configuration-1.9.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/commons-lang-2.6.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/hadoop-auth-2.5.0-cdh5.3.3.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/slf4j-log4j12-1.7.5.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/slf4j-api-1.7.5.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/log4j-1.2.17.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/hadoop-hdfs-2.5.0-cdh5.3.3.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/cloudera-navigator-server/libs/cdh5/servlet-api-2.5.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/cloudera-navigator-server/libs/cdh5/protobuf-java-2.5.0.jar" ./lib
编译
javac test.java -cp "./lib/hadoop-common-2.5.0-cdh5.3.3.jar"
manifest.mf
Manifest-Version: 1.0
Class-Path: lib/hadoop-hdfs-2.5.0-cdh5.3.3.jar lib/hadoop-common-2.5.0-cdh5.3.3.jar lib/commons-logging-1.1.1.jar lib/guava-14.0.1.jar lib/commons-collections-3.2.2.jar lib/commons-configuration-1.9.jar lib/commons-lang-2.6.jar lib/hadoop-auth-2.5.0-cdh5.3.3.jar
 lib/slf4j-log4j12-1.7.5.jar lib/slf4j-api-1.7.5.jar lib/log4j-1.2.17.jar lib/hadoop-hdfs-2.5.0-cdh5.3.3.jar lib/servlet-api-2.5.jar lib/commons-cli-1.4.jar lib/protobuf-java-2.5.0.jar
Main-Class: net.xxx.test
打包
jar cvfm test.jar manifest.mf lib net
运行
java -jar test.jar
4000
package net.xxx;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
public class test {
/**
 * Writes the string "hello" to the given HDFS path, acting as user "hdfs".
 *
 * @param hdfs full HDFS URI of the target file, e.g. hdfs://host:8020/test/hello.txt
 * @throws IOException declared for API compatibility; in practice all errors
 *         are caught below and printed (the original swallowed them the same way)
 */
public void WriteFile(String hdfs) throws IOException {
Configuration conf = new Configuration();
// try-with-resources guarantees the output stream and the FileSystem are
// closed even when an exception is thrown mid-write; the original only
// closed them on the success path and leaked both on failure.
try (FileSystem fs = FileSystem.get(URI.create(hdfs), conf, "hdfs");
     FSDataOutputStream hdfsOutStream = fs.create(new Path(hdfs))) {
// writeBytes emits one byte per character, which is correct for this ASCII
// payload. The original writeChars wrote UTF-16 (two bytes per char), so
// ReadFile's raw byte dump printed NUL-interleaved garbage.
hdfsOutStream.writeBytes("hello");
} catch (Exception e) {
// Best-effort demo: report and continue (preserves original behavior).
System.out.println(e);
}
}
/**
 * Reads the file at the given HDFS path and dumps its raw bytes to stdout,
 * followed by a newline. Acts as user "hdfs".
 *
 * @param hdfs full HDFS URI of the file to read
 * @throws IOException declared for API compatibility; errors are caught and printed
 */
public void ReadFile(String hdfs) throws IOException {
Configuration conf = new Configuration();
// try-with-resources closes the input stream and FileSystem on every path,
// fixing the leak the original had when read() threw.
try (FileSystem fs = FileSystem.get(URI.create(hdfs), conf, "hdfs");
     FSDataInputStream hdfsInStream = fs.open(new Path(hdfs))) {
byte[] ioBuffer = new byte[1024];
int readLen;
// Copy the file to stdout 1 KiB at a time until EOF (read returns -1).
while ((readLen = hdfsInStream.read(ioBuffer)) != -1) {
System.out.write(ioBuffer, 0, readLen);
}
System.out.println();
} catch (Exception e) {
System.out.println(e);
}
}
/**
 * Demo entry point: writes "hello" to a fixed HDFS path, then reads it back.
 */
public static void main(String[] args) throws IOException {
String hdfs = "hdfs://test127:8020/test/hello.txt";
test t = new test();
t.WriteFile(hdfs);
t.ReadFile(hdfs);
}
}
依赖库编译
cp "/opt/cm-5.12.1/share/cmf/common_jars/hadoop-common-2.5.0-cdh5.3.3.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/lib/commons-logging-1.1.1.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/guava-14.0.1.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/commons-collections-3.2.2.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/commons-configuration-1.9.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/commons-lang-2.6.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/hadoop-auth-2.5.0-cdh5.3.3.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/slf4j-log4j12-1.7.5.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/slf4j-api-1.7.5.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/log4j-1.2.17.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/common_jars/hadoop-hdfs-2.5.0-cdh5.3.3.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/cloudera-navigator-server/libs/cdh5/servlet-api-2.5.jar" ./lib
cp "/opt/cm-5.12.1/share/cmf/cloudera-navigator-server/libs/cdh5/protobuf-java-2.5.0.jar" ./lib
编译
javac test.java -cp "./lib/hadoop-common-2.5.0-cdh5.3.3.jar"
manifest.mf
Manifest-Version: 1.0
Class-Path: lib/hadoop-hdfs-2.5.0-cdh5.3.3.jar lib/hadoop-common-2.5.0-cdh5.3.3.jar lib/commons-logging-1.1.1.jar lib/guava-14.0.1.jar lib/commons-collections-3.2.2.jar lib/commons-configuration-1.9.jar lib/commons-lang-2.6.jar lib/hadoop-auth-2.5.0-cdh5.3.3.jar
 lib/slf4j-log4j12-1.7.5.jar lib/slf4j-api-1.7.5.jar lib/log4j-1.2.17.jar lib/hadoop-hdfs-2.5.0-cdh5.3.3.jar lib/servlet-api-2.5.jar lib/commons-cli-1.4.jar lib/protobuf-java-2.5.0.jar
Main-Class: net.xxx.test
打包
jar cvfm test.jar manifest.mf lib net
运行
java -jar test.jar
4000
相关文章推荐
- hadoop学习之HDFS(2.1):linux下eclipse中配置hadoop-mapreduce开发环境并运行WordCount.java程序
- hadoop生态系统学习之路(三)java实现上传文件(本地或ftp)至hdfs
- 云计算学习笔记---异常处理---hadoop问题处理ERROR org.apache.hadoop.hdfs.server.datanode.DataNode: java.lang.NullPoin
- Hadoop学习笔记(三)——HDFS应用程序开发
- hadoop学习之HDFS(2.7):实例:开发shell脚本定时采集日志数据到hdfs
- hadoop学习笔记--5.HDFS的java api接口访问
- Hadoop2.4.1 HDFS的Java客户端开发
- hadoop 学习(四)之java操作hdfs
- 一脸懵逼学习hadoop之HDFS的java客户端编写
- Hadoop学习之java连接HDFS文件系统
- 马士兵hadoop第三课:java开发hdfs
- 基于eclipse的hadoop开发-----HDFS API学习
- hadoop学习(五)----HDFS的java操作
- Hadoop学习二(java api调用操作HDFS)
- hadoop学习之HDFS(2.4):hadoop数据类型与java数据类型的对比与转换
- hadoop学习记(3)--HDFS(java_demo)
- 大数据IMF传奇 java开发hadoop wodcount和hdfs文件 !
- Hadoop学习笔记:HDFS的java API使用
- Hadoop学习笔记——1.java读取Oracle中表的数据,创建新文件写入Hdfs
- hadoop学习之HDFS(2.8):hdfs的javaAPI使用及示例