hadoop(五): shell命令
2016-09-03 05:43
267 查看
hdfs dfs
-cat URI : 查看文件内容
hdfs dfs -cat hdfs://mycluster/user/root/rcc1
hdfs dfs -cat file:///usr/hdp/2.4.2.0-258/hadoop/bin/rcc
-ls <args>: 查文件列表
返回格式:permissions number_of_replicas userid groupid filesize modification_date modification_time filename
-mkdir <paths> : 创建目录
The behavior is much like unix mkdir -p creating parent directories along the path
-copyFromLocal <localsrc> URI: 上传文件,类似于put
-put <localsrc> ... <dst>
-copyToLocal [-ignorecrc] [-crc] URI <localdst> : 下载到本地
-get [-ignorecrc] [-crc] <src> <localdst>
-count [-q] <paths> :统计
The output columns with -count are: DIR_COUNT, FILE_COUNT, CONTENT_SIZE, FILE_NAME
The output columns with -q are: QUOTA, REMAINING_QUOTA, SPACE_QUOTA, REMAINING_SPACE_QUOTA
-du [-s] [-h] URI [URI …]: 统计大小
The -s option will result in an aggregate summary of file lengths being displayed, rather than the individual files
The -h option will format file sizes in a "human-readable" fashion (e.g. 64.0m instead of 67108864)
-rm -r [-skipTrash] URI [URI …]
-r: Recursive version of delete
-touchz URI [URI …]
Create a file of zero length
小结:
本质就是linux文件操作命令
-cat URI : 查看文件内容
hdfs dfs -cat hdfs://mycluster/user/root/rcc1
hdfs dfs -cat file:///usr/hdp/2.4.2.0-258/hadoop/bin/rcc
-ls <args>: 查文件列表
返回格式:permissions number_of_replicas userid groupid filesize modification_date modification_time filename
[root@hdp2 ~]# hdfs dfs -ls Found 4 items drwx------ - root hdfs 0 2016-09-01 21:00 .Trash drwxr-xr-x - root hdfs 0 2016-08-12 12:29 .hiveJars -rw-r--r-- 3 root hdfs 531 2016-09-01 12:53 mapred -rw-r--r-- 3 root hdfs 1857 2016-09-01 12:44 rcc1
-mkdir <paths> : 创建目录
The behavior is much like unix mkdir -p creating parent directories along the path
[root@hdp2 ~]# hdfs dfs -ls Found 4 items drwx------ - root hdfs 0 2016-09-01 21:00 .Trash drwxr-xr-x - root hdfs 0 2016-08-12 12:29 .hiveJars -rw-r--r-- 3 root hdfs 531 2016-09-01 12:53 mapred -rw-r--r-- 3 root hdfs 1857 2016-09-01 12:44 rcc1
[root@hdp2 ~]# hdfs dfs -mkdir hdfs://mycluster/user/root/zhu
[root@hdp2 ~]# hdfs dfs -ls
Found 5 items
drwx------ - root hdfs 0 2016-09-01 21:00 .Trash
drwxr-xr-x - root hdfs 0 2016-08-12 12:29 .hiveJars
-rw-r--r-- 3 root hdfs 531 2016-09-01 12:53 mapred
-rw-r--r-- 3 root hdfs 1857 2016-09-01 12:44 rcc1
drwxr-xr-x - root hdfs 0 2016-09-02 04:35 zhu
-copyFromLocal <localsrc> URI: 上传文件,类似于put
-put <localsrc> ... <dst>
[root@hdp2 ~]# hdfs dfs -ls /user/root/zhu [root@hdp2 ~]# hdfs dfs -put /usr/zhu/a1.png /user/root/zhu [root@hdp2 ~]# hdfs dfs -copyFormLocal /usr/zhu/a2.png /user/root/zhu -copyFormLocal: Unknown command [root@hdp2 ~]# hdfs dfs -copyFromLocal /usr/zhu/a2.png /user/root/zhu [root@hdp2 ~]# hdfs dfs -ls /user/root/zhu Found 2 items -rw-r--r-- 3 root hdfs 86908 2016-09-02 04:48 /user/root/zhu/a1.png -rw-r--r-- 3 root hdfs 55823 2016-09-02 04:49 /user/root/zhu/a2.png [root@hdp2 ~]#
-copyToLocal [-ignorecrc] [-crc] URI <localdst> : 下载到本地
-get [-ignorecrc] [-crc] <src> <localdst>
[root@hdp2 ~]# clear [root@hdp2 ~]# hdfs dfs -ls /user/root/zhu Found 2 items -rw-r--r-- 3 root hdfs 86908 2016-09-02 04:48 /user/root/zhu/a1.png -rw-r--r-- 3 root hdfs 55823 2016-09-02 04:49 /user/root/zhu/a2.png [root@hdp2 ~]# hdfs dfs -copyToLocal hdfs://mycluster/user/root/zhu/a1.png /usr/zhu/tmp [root@hdp2 ~]# hdfs dfs -get /usr/root/zhu/a2.png /usr/zhu/tmp get: `/usr/root/zhu/a2.png': No such file or directory [root@hdp2 ~]# hdfs dfs -get /user/root/zhu/a2.png /usr/zhu/tmp [root@hdp2 ~]# cd /usr/zhu/tmp [root@hdp2 tmp]# ll total 144 -rw-r--r-- 1 root root 86908 Sep 2 04:54 a1.png -rw-r--r-- 1 root root 55823 Sep 2 04:55 a2.png [root@hdp2 tmp]#
-count [-q] <paths> :统计
The output columns with -count are: DIR_COUNT, FILE_COUNT, CONTENT_SIZE, FILE_NAME
The output columns with -q are: QUOTA, REMAINING_QUOTA, SPACE_QUOTA, REMAINING_SPACE_QUOTA
[root@hdp2 tmp]# hdfs dfs -count /user/root/zhu 1 2 142731 /user/root/zhu [root@hdp2 tmp]# hdfs dfs -count -q /user/root/zhu none inf none inf 1 2 142731 /user/root/zhu [root@hdp2 tmp]#
-du [-s] [-h] URI [URI …]: 统计大小
The -s option will result in an aggregate summary of file lengths being displayed, rather than the individual files
The -h option will format file sizes in a "human-readable" fashion (e.g. 64.0m instead of 67108864)
[root@hdp2 tmp]# hdfs dfs -du -h /user/root/zhu 84.9 K /user/root/zhu/a1.png 54.5 K /user/root/zhu/a2.png [root@hdp2 tmp]#
-rm -r [-skipTrash] URI [URI …]
-r: Recursive version of delete
[root@hdp2 tmp]# hdfs dfs -ls /user/root/zhu Found 1 items -rw-r--r-- 3 root hdfs 86908 2016-09-02 05:03 /user/root/zhu/a1.png [root@hdp2 tmp]# hdfs dfs -rm -r /user/root/zhu 16/09/02 05:04:29 INFO fs.TrashPolicyDefault: Namenode trash configuration: Deletion interval = 360 minutes, Emptier interval = 0 minutes. Moved: 'hdfs://mycluster/user/root/zhu' to trash at: hdfs://mycluster/user/root/.Trash/Current [root@hdp2 tmp]# hdfs dfs -ls /user/root/zhu ls: `/user/root/zhu': No such file or directory [root@hdp2 tmp]#
-touchz URI [URI …]
Create a file of zero length
[root@hdp2 tmp]# hdfs dfs -ls Found 4 items drwx------ - root hdfs 0 2016-09-02 05:02 .Trash drwxr-xr-x - root hdfs 0 2016-08-12 12:29 .hiveJars -rw-r--r-- 3 root hdfs 531 2016-09-01 12:53 mapred -rw-r--r-- 3 root hdfs 1857 2016-09-01 12:44 rcc1 [root@hdp2 tmp]# hdfs dfs -touchz a.txt [root@hdp2 tmp]# hdfs dfs -ls Found 5 items drwx------ - root hdfs 0 2016-09-02 05:02 .Trash drwxr-xr-x - root hdfs 0 2016-08-12 12:29 .hiveJars -rw-r--r-- 3 root hdfs 0 2016-09-02 05:08 a.txt -rw-r--r-- 3 root hdfs 531 2016-09-01 12:53 mapred -rw-r--r-- 3 root hdfs 1857 2016-09-01 12:44 rcc1 [root@hdp2 tmp]# hdfs dfs -cat /user/root/a.txt [root@hdp2 tmp]#
小结:
本质就是linux文件操作命令
相关文章推荐
- Hadoop Shell命令
- Hadoop Shell命令
- Hadoop Shell命令 |HDFS Shell命令| HDFS 命令
- Hadoop Shell命令
- hadoop 文件系统shell命令
- Hadoop Shell命令 |HDFS Shell命令| HDFS 命令
- Hadoop Streaming shell 脚本命令汇总
- Hadoop Shell命令(1)
- Hadoop基于Shell命令与底层Unix操作系统的交互
- hadoop 创建用户及hdfs权限,hdfs操作等常用shell命令
- hadoop shell 命令
- hadoop 创建用户及hdfs权限,hdfs操作等常用shell命令
- hadoop shell命令远程提交
- Hadoop Shell命令(1)
- hadoop shell 命令
- Hadoop Shell命令
- hadoop分布式文件系统中的shell命令(转自官方文档)
- Hadoop FS Shell命令大全
- Hadoop Shell命令
- hadoop shell 命令