[root@hadoop01 ~]# ll
total 156
drwxr-xr-x. 18 root   root    4096 Jan  5 05:05 apps
-rw-r--r--.  1 root   root     156 Jul 24 21:47 common-friends.log
drwxr-xr-x.  2 root   root    4096 Oct  9 09:18 data
drwxr-xr-x.  2 root   root    4096 Aug 29 01:52 esdata
-rw-r--r--.  1 root   root    2231 Jul 24 22:00 flow.dat
drwxr-xr-x.  9 root   root    4096 Oct  8 08:13 flume
drwxr-xr-x.  2 root   root    4096 Oct 30 18:30 hadooptest
drwxr-xr-x.  5 root   root    4096 Oct  4 15:12 hdpdata
drwxr-xr-x.  4 root   root    4096 Oct  5 10:17 hivedata
drwxr-xr-x.  3 root   root    4096 Sep 25 13:59 journaldata
drwxr-xr-x.  5 root   root    4096 Oct 13 13:55 logs
drwxr-xr-x.  2 root   root    4096 Oct 22 09:46 out123
-rwxr-xr-x.  1 root   root    4482 Aug 19 09:57 PWD
drwxr-xr-x.  3 root   root    4096 Dec 21 07:02 soft
drwxr-xr-x. 12 hadoop hadoop  4096 Jun 21  2016 spark-1.6.2-bin-hadoop2.6
drwxr-xr-x.  9 root   root    4096 Oct 10 10:23 sqoop
drwxr-xr-x.  5 root   root    4096 Nov 17 08:17 storm-local
-rw-r--r--.  1 root   root     660 Aug 28 06:33 storm.yaml
-rw-r--r--.  1 root   root     124 Sep 28 10:06 user_prefer.sh
-rw-r--r--.  1 root   root    7495 Nov 16 03:42 wordcount.jar
-rw-r--r--.  1 root   root      56 Oct 13 16:46 word.txt
drwxr-xr-x.  3 root   root    4096 Jan  5 18:49 zkdata
-rw-r--r--.  1 root   root   54453 Jan  5 18:49 zookeeper.out
[root@hadoop01 ~]# du -sh .    [summarize the total size of everything under the current directory; -s produces a single summary total, -h displays sizes in human-readable units such as KB, MB, GB, TB]
6.6G    .
[root@hadoop01 ~]# du -sh *    [report the size of each directory and file under the current directory individually]
3.2G    apps
4.0K    common-friends.log
416K    data
4.0K    esdata
4.0K    flow.dat
82M     flume
4.0K    hadooptest
150M    hdpdata
92M     hivedata
66M     journaldata
344M    logs
16K     out123
8.0K    PWD
2.2G    soft
303M    spark-1.6.2-bin-hadoop2.6
27M     sqoop
52K     storm-local
4.0K    storm.yaml
4.0K    user_prefer.sh
8.0K    wordcount.jar
4.0K    word.txt
260M    zkdata
60K     zookeeper.out
[root@hadoop01 ~]# du -sh apps    [summarize the size of one specified directory]
3.2G    apps
[root@hadoop01 ~]# du -sh logs/*    [report the size of each subdirectory and file under the specified directory individually]
39M     logs/click_log
8.0K    logs/es-logs
306M    logs/kafka-logs
380K    logs/test.log
[root@hadoop01 ~]# du -sh *|grep [M]    [report the size of each entry under the current directory, then use "grep [M]" to keep only the entries whose size is in the MB range]
82M     flume
150M    hdpdata
92M     hivedata
66M     journaldata
344M    logs
303M    spark-1.6.2-bin-hadoop2.6
27M     sqoop
260M    zkdata
[root@hadoop01 ~]# du -sh *|grep [G]    [keep only the entries whose size is in the GB range]
3.2G    apps
2.2G    soft
[root@hadoop01 ~]# du -sh *|grep [MG]    [keep only the entries whose size is in the MB or GB range]
3.2G    apps
82M     flume
150M    hdpdata
92M     hivedata
66M     journaldata
344M    logs
2.2G    soft
303M    spark-1.6.2-bin-hadoop2.6
27M     sqoop
260M    zkdata
[root@hadoop01 ~]# du -sh *|grep [MG]|sort -nr    [keep only the MB- and GB-range entries and sort them in descending order; note that sort -nr compares only the leading number and ignores the M/G suffix, which is why the GB entries fall to the bottom of this listing]
344M    logs
303M    spark-1.6.2-bin-hadoop2.6
260M    zkdata
150M    hdpdata
92M     hivedata
82M     flume
66M     journaldata
27M     sqoop
3.2G    apps
2.2G    soft
[root@hadoop01 ~]#
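A hedged aside, not part of the original session: if a true largest-to-smallest ordering is wanted, GNU sort's -h option compares human-readable sizes, so the M/G suffixes are taken into account. Assuming a coreutils new enough to include sort -h (added in coreutils 7.5), the pipeline would look like the sketch below, and based on the sizes already listed above it should put the GB entries first:

du -sh * | grep [MG] | sort -hr    [sort by human-readable size, largest first; expected order begins 3.2G apps, 2.2G soft, 344M logs, ...]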