HDFS Shell Command Operations and Java Code Operations


(I) Implement the following functions in code, and carry out the same tasks using the shell commands provided by Hadoop:

(1) Upload any text file to HDFS; if the target file already exists in HDFS, let the user decide whether to append to the end of the existing file or to overwrite it;

Start Hadoop (on a typical single-node install, with start-dfs.sh):

Create two files to use in the experiment. To upload a local file to HDFS:

hadoop fs -put text.txt

 

If the file already exists, append to the end of it:

hadoop fs -appendToFile local.txt text.txt

 

If the file already exists, overwrite it:

hadoop fs -copyFromLocal -f local.txt text.txt

 

Code:

 

package cn.edu.zucc.hdfs;

import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFromLocalFile {
    /**
     * Check whether a path exists.
     */
    public static boolean test(Configuration conf, String path) {
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.exists(new Path(path));
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Copy a local file to the given HDFS path, overwriting the target if it exists.
     */
    public static void copyFromLocalFile(Configuration conf,
            String localFilePath, String remoteFilePath) {
        Path localPath = new Path(localFilePath);
        Path remotePath = new Path(remoteFilePath);
        try (FileSystem fs = FileSystem.get(conf)) {
            /* The first argument of fs.copyFromLocalFile controls whether the
             * source is deleted; the second controls whether the target is overwritten */
            fs.copyFromLocalFile(false, true, localPath, remotePath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Append the contents of a local file to an HDFS file.
     */
    public static void appendToFile(Configuration conf, String localFilePath,
            String remoteFilePath) {
        Path remotePath = new Path(remoteFilePath);
        try (FileSystem fs = FileSystem.get(conf);
                FileInputStream in = new FileInputStream(localFilePath)) {
            FSDataOutputStream out = fs.append(remotePath);
            byte[] data = new byte[1024];
            int read = -1;
            while ((read = in.read(data)) > 0) {
                out.write(data, 0, read);
            }
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String localFilePath = "/usr/local/hadoop/text.txt"; // local path
        String remoteFilePath = "/user/tiny/text.txt"; // HDFS path
        // String choice = "append"; // append if the file exists
        String choice = "overwrite"; // overwrite if the file exists

        try {
            /* Check whether the file already exists */
            boolean fileExists = false;
            if (CopyFromLocalFile.test(conf, remoteFilePath)) {
                fileExists = true;
                System.out.println(remoteFilePath + " already exists.");
            } else {
                System.out.println(remoteFilePath + " does not exist.");
            }
            /* Handle the three cases */
            if (!fileExists) { // the file does not exist: upload
                CopyFromLocalFile.copyFromLocalFile(conf, localFilePath,
                        remoteFilePath);
                System.out.println(localFilePath + " uploaded to " + remoteFilePath);
            } else if (choice.equals("overwrite")) { // overwrite
                CopyFromLocalFile.copyFromLocalFile(conf, localFilePath,
                        remoteFilePath);
                System.out.println(localFilePath + " overwrote " + remoteFilePath);
            } else if (choice.equals("append")) { // append
                CopyFromLocalFile.appendToFile(conf, localFilePath,
                        remoteFilePath);
                System.out.println(localFilePath + " appended to " + remoteFilePath);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

 

 

 

(2) Download a specified file from HDFS; if a local file with the same name already exists, automatically rename the downloaded file;

Shell commands (test whether the local file already exists, and rename the download if it does):

if test -e ./text.txt;
then hadoop fs -copyToLocal text.txt ./text2.txt;
else hadoop fs -copyToLocal text.txt ./text.txt;
fi
 
Code:

package cn.edu.zucc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.File;
import java.io.IOException;

public class CopyToLocal {
    /**
     * Download an HDFS file to the local filesystem. If the local path
     * already exists, rename the downloaded file automatically.
     */
    public static void copyToLocal(Configuration conf, String remoteFilePath,
            String localFilePath) {
        Path remotePath = new Path(remoteFilePath);
        try (FileSystem fs = FileSystem.get(conf)) {
            File f = new File(localFilePath);
            /* If the name is taken, rename automatically by appending _0, _1, ... */
            if (f.exists()) {
                System.out.println(localFilePath + " already exists.");
                int i = 0;
                while (true) {
                    f = new File(localFilePath + "_" + i);
                    if (!f.exists()) {
                        localFilePath = localFilePath + "_" + i;
                        break;
                    }
                    i++;
                }
                System.out.println("Renaming to: " + localFilePath);
            }
            // Download the file
            Path localPath = new Path(localFilePath);
            fs.copyToLocalFile(remotePath, localPath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String localFilePath = "/usr/local/hadoop/text.txt"; // local path
        String remoteFilePath = "/user/tiny/text.txt"; // HDFS path

        try {
            CopyToLocal.copyToLocal(conf, remoteFilePath, localFilePath);
            System.out.println("Download complete");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

 

 

(3) Print the contents of a specified HDFS file to the terminal;

Shell command:

hadoop fs -cat text.txt

Code:

package cn.edu.zucc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class Cat {
    /**
     * Print the contents of a file line by line.
     */
    public static void cat(Configuration conf, String remoteFilePath) {
        Path remotePath = new Path(remoteFilePath);
        try (FileSystem fs = FileSystem.get(conf);
                FSDataInputStream in = fs.open(remotePath);
                BufferedReader d = new BufferedReader(new InputStreamReader(in))) {
            String line;
            while ((line = d.readLine()) != null) {
                System.out.println(line);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteFilePath = "/user/tiny/input/text.txt"; // HDFS path

        try {
            System.out.println("Reading file: " + remoteFilePath);
            Cat.cat(conf, remoteFilePath);
            System.out.println("\nDone");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

 

 

 

 

(4) Display the read/write permissions, size, creation time, path, and other information of a specified HDFS file;

Shell command:

hadoop fs -ls -h text.txt

Code:

package cn.edu.zucc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.text.SimpleDateFormat;

public class List {
    /**
     * Display information about the specified file.
     */
    public static void ls(Configuration conf, String remoteFilePath) {
        try (FileSystem fs = FileSystem.get(conf)) {
            Path remotePath = new Path(remoteFilePath);
            FileStatus[] fileStatuses = fs.listStatus(remotePath);
            for (FileStatus s : fileStatuses) {
                System.out.println("Path: " + s.getPath().toString());
                System.out.println("Permissions: " + s.getPermission().toString());
                System.out.println("Size: " + s.getLen());
                /* getModificationTime() returns a timestamp; format it as a date */
                long timeStamp = s.getModificationTime();
                SimpleDateFormat format = new SimpleDateFormat(
                        "yyyy-MM-dd HH:mm:ss");
                String date = format.format(timeStamp);
                System.out.println("Time: " + date);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteFilePath = "/user/tiny/text.txt"; // HDFS path

        try {
            System.out.println("Reading file info: " + remoteFilePath);
            List.ls(conf, remoteFilePath);
            System.out.println("\nDone");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

 

 

 

 

(5) Given a directory in HDFS, output the read/write permissions, size, creation time, path, and other information of every file under it; if an entry is itself a directory, recursively output the information of all files under that directory;

Shell command: hadoop fs -ls -R -h /user/tiny

Code:

package cn.edu.zucc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import java.io.IOException;
import java.text.SimpleDateFormat;

public class ListDir {
    /**
     * Display information about every file under a directory (recursively).
     */
    public static void lsDir(Configuration conf, String remoteDir) {
        try (FileSystem fs = FileSystem.get(conf)) {
            Path dirPath = new Path(remoteDir);
            /* Recursively list all files under the directory */
            RemoteIterator<LocatedFileStatus> remoteIterator = fs.listFiles(
                    dirPath, true);
            /* Print the information of each file */
            while (remoteIterator.hasNext()) {
                FileStatus s = remoteIterator.next();
                System.out.println("Path: " + s.getPath().toString());
                System.out.println("Permissions: " + s.getPermission().toString());
                System.out.println("Size: " + s.getLen());
                /* getModificationTime() returns a timestamp; format it as a date */
                long timeStamp = s.getModificationTime();
                SimpleDateFormat format = new SimpleDateFormat(
                        "yyyy-MM-dd HH:mm:ss");
                String date = format.format(timeStamp);
                System.out.println("Time: " + date);
                System.out.println();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteDir = "/user/tiny"; // HDFS path

        try {
            System.out.println("Recursively listing file info under directory: " + remoteDir);
            ListDir.lsDir(conf, remoteDir);
            System.out.println("Done");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

 

 

 

(6) Given the path of a file inside HDFS, implement creating and deleting that file. If the directory the file belongs to does not exist, create the directory automatically;

Shell commands:

Create the file (creating the parent directory first if necessary):

if hadoop fs -test -d dir1/dir2;
then hadoop fs -touchz dir1/dir2/filename;
else hadoop fs -mkdir -p dir1/dir2 && hadoop fs -touchz dir1/dir2/filename;
fi

Delete the file:

hadoop fs -rm dir1/dir2/filename

Code:

package cn.edu.zucc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class RemoveOrMake {
    /**
     * Check whether a path exists.
     */
    public static boolean test(Configuration conf, String path) {
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.exists(new Path(path));
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Create a directory (missing parent directories are created as well).
     */
    public static boolean mkdir(Configuration conf, String remoteDir) {
        try (FileSystem fs = FileSystem.get(conf)) {
            Path dirPath = new Path(remoteDir);
            return fs.mkdirs(dirPath);
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Create an empty file.
     */
    public static void touchz(Configuration conf, String remoteFilePath) {
        Path remotePath = new Path(remoteFilePath);
        try (FileSystem fs = FileSystem.get(conf)) {
            FSDataOutputStream outputStream = fs.create(remotePath);
            outputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Delete a file.
     */
    public static boolean rm(Configuration conf, String remoteFilePath) {
        Path remotePath = new Path(remoteFilePath);
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.delete(remotePath, false);
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteFilePath = "/user/tiny/input/text.txt"; // HDFS file path
        String remoteDir = "/user/tiny/input"; // directory the file belongs to

        try {
            /* If the file exists, delete it; otherwise create it */
            if (RemoveOrMake.test(conf, remoteFilePath)) {
                RemoveOrMake.rm(conf, remoteFilePath); // delete
                System.out.println("Deleted file: " + remoteFilePath);
            } else {
                if (!RemoveOrMake.test(conf, remoteDir)) { // create the directory if missing
                    RemoveOrMake.mkdir(conf, remoteDir);
                    System.out.println("Created directory: " + remoteDir);
                }
                RemoveOrMake.touchz(conf, remoteFilePath);
                System.out.println("Created file: " + remoteFilePath);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

 

 

 

 

 

 

(7) Given the path of a directory in HDFS, implement creating and deleting that directory. When creating it, if its parent directories do not exist, create them automatically; when deleting it, let the user specify whether the directory should still be deleted when it is not empty;

Shell commands:

Create the directory (missing parent directories are created automatically):

hadoop fs -mkdir -p dir1/dir2

Delete the directory (-rmdir only removes an empty directory; -rm -r removes it even when it is not empty):

hadoop fs -rmdir dir1/dir2
hadoop fs -rm -r dir1/dir2

Code:
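Below is a minimal sketch of the directory create/delete operations this task asks for, along the same lines as the RemoveOrMake class from task (6). The class name MkdirRmdir, the isDirEmpty helper, and the forceDelete flag are illustrative choices; the underlying calls are fs.mkdirs and fs.delete(path, recursive).

package cn.edu.zucc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class MkdirRmdir {
    /**
     * Check whether a path exists.
     */
    public static boolean test(Configuration conf, String path) {
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.exists(new Path(path));
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Create a directory; missing parent directories are created automatically.
     */
    public static boolean mkdir(Configuration conf, String remoteDir) {
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.mkdirs(new Path(remoteDir));
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Check whether a directory has no entries at all.
     */
    public static boolean isDirEmpty(Configuration conf, String remoteDir) {
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.listStatus(new Path(remoteDir)).length == 0;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Delete a directory; recursive = true also removes a non-empty directory.
     */
    public static boolean rmDir(Configuration conf, String remoteDir, boolean recursive) {
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.delete(new Path(remoteDir), recursive);
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteDir = "/user/tiny/input"; // HDFS directory (example)
        boolean forceDelete = false; // user's choice: delete even when not empty?

        if (!MkdirRmdir.test(conf, remoteDir)) { // create it if it does not exist
            MkdirRmdir.mkdir(conf, remoteDir);
            System.out.println("Created directory: " + remoteDir);
        } else { // otherwise delete it, honoring the forceDelete choice
            if (MkdirRmdir.isDirEmpty(conf, remoteDir) || forceDelete) {
                MkdirRmdir.rmDir(conf, remoteDir, true);
                System.out.println("Deleted directory: " + remoteDir);
            } else {
                System.out.println("Directory is not empty, not deleted: " + remoteDir);
            }
        }
    }
}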


 

 

 

 

 

(8) Append content to a specified HDFS file, letting the user decide whether the content goes at the beginning or at the end of the existing file;

Shell commands:

Append to the end of the file:

hadoop fs -appendToFile local.txt text.txt

Append to the beginning of the file (there is no direct shell support for this, so download the file, concatenate it after the new content locally, then upload it back with overwrite):

hadoop fs -get text.txt
cat text.txt >> local.txt
hadoop fs -copyFromLocal -f local.txt text.txt

Code:

 

 

package cn.edu.zucc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.FileInputStream;
import java.io.IOException;

public class AppendToFile {
    /**
     * Check whether a path exists.
     */
    public static boolean test(Configuration conf, String path) {
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.exists(new Path(path));
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Append a text string to the end of an HDFS file.
     */
    public static void appendContentToFile(Configuration conf, String content,
            String remoteFilePath) {
        try (FileSystem fs = FileSystem.get(conf)) {
            Path remotePath = new Path(remoteFilePath);
            /* Open an output stream whose writes are appended to the end of the file */
            FSDataOutputStream out = fs.append(remotePath);
            out.write(content.getBytes());
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Append the contents of a local file to an HDFS file.
     */
    public static void appendToFile(Configuration conf, String localFilePath,
            String remoteFilePath) {
        Path remotePath = new Path(remoteFilePath);
        try (FileSystem fs = FileSystem.get(conf);
                FileInputStream in = new FileInputStream(localFilePath)) {
            FSDataOutputStream out = fs.append(remotePath);
            byte[] data = new byte[1024];
            int read = -1;
            while ((read = in.read(data)) > 0) {
                out.write(data, 0, read);
            }
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Move an HDFS file to the local filesystem; the source is deleted after the move.
     */
    public static void moveToLocalFile(Configuration conf,
            String remoteFilePath, String localFilePath) {
        try (FileSystem fs = FileSystem.get(conf)) {
            Path remotePath = new Path(remoteFilePath);
            Path localPath = new Path(localFilePath);
            fs.moveToLocalFile(remotePath, localPath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Create an empty file.
     */
    public static void touchz(Configuration conf, String remoteFilePath) {
        try (FileSystem fs = FileSystem.get(conf)) {
            Path remotePath = new Path(remoteFilePath);
            FSDataOutputStream outputStream = fs.create(remotePath);
            outputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteFilePath = "/user/tiny/text.txt"; // HDFS file
        String content = "newly appended content\n";
        String choice = "after"; // append to the end of the file
        // String choice = "before"; // append to the beginning of the file

        try {
            /* Check whether the file exists */
            if (!AppendToFile.test(conf, remoteFilePath)) {
                System.out.println("File does not exist: " + remoteFilePath);
            } else {
                if (choice.equals("after")) { // append at the end
                    AppendToFile.appendContentToFile(conf, content,
                            remoteFilePath);
                    System.out.println("Appended content to the end of " + remoteFilePath);
                } else if (choice.equals("before")) { // append at the beginning
                    /* There is no API for prepending directly, so move the file
                     * to a local temporary copy, recreate the HDFS file, then
                     * append the new content followed by the original content */
                    String localTmpPath = "/user/hadoop/tmp.txt";
                    AppendToFile.moveToLocalFile(conf, remoteFilePath,
                            localTmpPath); // move to the local filesystem
                    AppendToFile.touchz(conf, remoteFilePath); // recreate the file
                    AppendToFile.appendContentToFile(conf, content,
                            remoteFilePath); // write the new content first
                    AppendToFile.appendToFile(conf, localTmpPath,
                            remoteFilePath); // then write the original content
                    System.out.println("Appended content to the beginning of: " + remoteFilePath);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

 

 

 

 

 

(9) Delete a specified file from HDFS;

Shell command: hadoop fs -rm test (the older -rmr form is deprecated; plain -rm is enough for a single file, and -rm -r deletes a directory recursively)
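Code (a minimal sketch built on FileSystem.delete; the class name RmFile and the example path are placeholders):

package cn.edu.zucc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class RmFile {
    /**
     * Delete an HDFS file; the second argument of delete() disables recursion,
     * so a non-empty directory at this path would not be removed by mistake.
     */
    public static boolean rm(Configuration conf, String remoteFilePath) {
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.delete(new Path(remoteFilePath), false);
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteFilePath = "/user/tiny/text.txt"; // HDFS path (example)

        if (RmFile.rm(conf, remoteFilePath)) {
            System.out.println("Deleted file: " + remoteFilePath);
        } else {
            System.out.println("Delete failed (the file may not exist): " + remoteFilePath);
        }
    }
}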

 

 

(10) Move a file from a source path to a destination path within HDFS.

Shell command: hadoop fs -mv text.txt input

Code:

package cn.edu.zucc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class MoveFile {
    /**
     * Move (rename) a file within HDFS.
     */
    public static boolean mv(Configuration conf, String remoteFilePath,
            String remoteToFilePath) {
        try (FileSystem fs = FileSystem.get(conf)) {
            Path srcPath = new Path(remoteFilePath);
            Path dstPath = new Path(remoteToFilePath);
            return fs.rename(srcPath, dstPath);
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteFilePath = "hdfs:///user/tiny/text.txt"; // source HDFS path
        String remoteToFilePath = "hdfs:///user/tiny/input"; // destination HDFS path

        try {
            if (MoveFile.mv(conf, remoteFilePath, remoteToFilePath)) {
                System.out.println("Moved " + remoteFilePath + " to "
                        + remoteToFilePath);
            } else {
                System.out.println("Operation failed (source file missing or move failed)");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

 

 

 

(II) Write a class MyFSDataInputStream that extends org.apache.hadoop.fs.FSDataInputStream, with the following requirement: implement a method readLine() that reads a specified HDFS file line by line; it returns null when the end of the file is reached, and otherwise returns one line of text from the file.

Code:

 

package cn.edu.zucc.hdfs;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MyFSDataInputStream extends FSDataInputStream {

    public MyFSDataInputStream(InputStream in) {
        super(in);
    }

    /**
     * Read the first line of the given HDFS file; returns null if the file is empty.
     */
    public static String readline(Configuration conf, String remoteFilePath) {
        try (FileSystem fs = FileSystem.get(conf)) {
            Path remotePath = new Path(remoteFilePath);
            FSDataInputStream in = fs.open(remotePath);
            BufferedReader d = new BufferedReader(new InputStreamReader(in));
            String line = null;
            if ((line = d.readLine()) != null) {
                d.close();
                in.close();
                return line;
            }
            return null;
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteFilePath = "/user/tiny/text.txt"; // HDFS path

        System.out.println("Reading file: " + remoteFilePath);
        System.out.println(MyFSDataInputStream.readline(conf, remoteFilePath));
        System.out.println("\nDone");
    }
}
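The static readline above returns only the first line of the file. A closer fit to the assignment is an instance-level readLine() that consumes the wrapped stream one line per call and returns null at end of file. Below is a minimal sketch (the class name MyLineInputStream is illustrative, and the byte-to-char cast assumes single-byte text with '\n' line endings):

package cn.edu.zucc.hdfs;

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MyLineInputStream extends FSDataInputStream {

    public MyLineInputStream(InputStream in) {
        super(in);
    }

    /**
     * Read one line of text; returns null once the end of the file is reached.
     * Bytes are accumulated until a '\n' is seen.
     */
    public String readLine() throws IOException {
        StringBuilder line = new StringBuilder();
        int b;
        while ((b = read()) != -1) {
            if (b == '\n') {
                return line.toString();
            }
            line.append((char) b); // assumes single-byte characters
        }
        return line.length() == 0 ? null : line.toString();
    }

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        FileSystem fs = FileSystem.get(conf);
        /* fs.open returns an FSDataInputStream, which is Seekable, so it can
         * be wrapped by this class */
        try (MyLineInputStream in = new MyLineInputStream(
                fs.open(new Path("/user/tiny/text.txt")))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
        fs.close();
    }
}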

 

 

(III) Consult the Java documentation or other materials and use java.net.URL together with org.apache.hadoop.fs.FsUrlStreamHandlerFactory to print the text of a specified HDFS file to the terminal.

Code:

 

package cn.edu.zucc.hdfs;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

public class FsUrl {
    static {
        /* Register the hdfs:// protocol handler; this can only be done once per JVM */
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    public static void cat(String remoteFilePath) {
        try (InputStream in = new URL("hdfs", "localhost", 9000, remoteFilePath)
                .openStream()) {
            IOUtils.copyBytes(in, System.out, 4096, false);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Main.
     */
    public static void main(String[] args) {
        String remoteFilePath = "/user/tiny/text.txt"; // HDFS path

        try {
            System.out.println("Reading file: " + remoteFilePath);
            FsUrl.cat(remoteFilePath);
            System.out.println("\nDone");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

 


