Java API Operations: Creating a Directory


1. Downloading a file from HDFS to the local Windows machine:


package com.css.hdfs01;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Download a file from HDFS to the local Windows machine.
 *
 * Notes:
 * 1. The Hadoop environment variables (HADOOP_HOME) must be configured.
 * 2. A compiled winutils package is required on Windows.
 */
public class HdfsClientDemo02 {
    public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
        // 1. Load the configuration
        Configuration conf = new Configuration();
        // 2. Set the replication factor
        conf.set("dfs.replication", "2");
        // 3. Set the block size
        conf.set("dfs.blocksize", "64m");
        // 4. Construct the client
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.146.132:9000"), conf, "root");
        // 5. Download the HDFS file to the local Windows disk
        fs.copyToLocalFile(new Path("/hdfs-site.xml"), new Path("c:/"));
        // 6. Release resources
        fs.close();
    }
}
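
If configuring winutils is not an option, the four-argument overload of copyToLocalFile can route the write through the raw local file system, which skips the client-side checksum file and is a common workaround on Windows. A minimal sketch, reusing the fs client from step 4 above:

// Alternative to step 5: useRawLocalFileSystem = true avoids the .crc sidecar file
// and the native-IO path that needs winutils on Windows.
fs.copyToLocalFile(
        false,                       // delSrc: do not delete the source on HDFS
        new Path("/hdfs-site.xml"),  // source path on HDFS
        new Path("c:/"),             // destination on the local disk
        true);                       // useRawLocalFileSystem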

2. Commonly used HDFS APIs:


package com.css.hdfs02;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;

/**
 * Commonly used HDFS APIs
 */
public class HdfsClientTest {
    
    FileSystem fs = null;
    
    @Before
    public void init() throws IOException, InterruptedException, URISyntaxException {
        // 1. Load the configuration
        Configuration conf = new Configuration();
        // 2. Set the replication factor
        conf.set("dfs.replication", "2");
        // 3. Set the block size
        conf.set("dfs.blocksize", "64m");
        // 4. Construct the client
        fs = FileSystem.get(new URI("hdfs://192.168.146.132:9000/"), conf, "root");
    }
    
    /**
     * Create a directory in HDFS.
     * Shell equivalent: hdfs dfs -mkdir /dirname
     */
    @Test
    public void hdfsMkdir() throws IllegalArgumentException, IOException{
        // 1. Create the directory
        fs.mkdirs(new Path("/hello"));
        // 2. Release resources
        fs.close();
    }
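    
    /**
     * A hedged variant (the /hello-perm path is illustrative): mkdirs also takes an
     * FsPermission, which plays the role of `hdfs dfs -mkdir` followed by chmod.
     */
    @Test
    public void hdfsMkdirWithPermission() throws IllegalArgumentException, IOException{
        // 0755 = rwxr-xr-x; FsPermission(short) interprets the octal mode bits
        fs.mkdirs(new Path("/hello-perm"),
                new org.apache.hadoop.fs.permission.FsPermission((short) 0755));
        // Release resources
        fs.close();
    }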
    
    /**
     * Move/rename a file in HDFS.
     * Shell equivalent: hdfs dfs -mv /src-path /dst-path
     * (hdfs dfs -cp copies instead; its Java counterpart is FileUtil.copy, not rename;
     * see the sketch after this method.)
     */
    @Test
    public void hdfsRename() throws IllegalArgumentException, IOException{
        // 1. Move/rename the file
        fs.rename(new Path("/aa.txt"), new Path("/hello/aa.txt"));
        // 2. Release resources
        fs.close();
    }
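    
    /**
     * Hedged counterpart for `hdfs dfs -cp` (rename only moves): FileUtil.copy copies
     * between paths; the destination name here is illustrative.
     */
    @Test
    public void hdfsCp() throws IllegalArgumentException, IOException{
        // Args: source fs/path, destination fs/path, deleteSource, configuration
        org.apache.hadoop.fs.FileUtil.copy(fs, new Path("/hello/aa.txt"),
                fs, new Path("/hello/aa-copy.txt"), false, fs.getConf());
        fs.close();
    }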
    
    /**
     * Delete a file or directory in HDFS.
     * Shell equivalents:
     * hdfs dfs -rm /filename
     * hdfs dfs -rm -r /dirname
     */
    @Test
    public void hdfsRm() throws IllegalArgumentException, IOException{
        // 1. Delete the file
        // The single-argument delete(Path) below is deprecated:
        // fs.delete(new Path("/aaaa.txt"));
        // Arg 1: path to delete; arg 2: delete recursively?
        fs.delete(new Path("/aaa111.txt"), true);
        // 2. Release resources
        fs.close();
    }
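    
    /**
     * Hedged variant: guard the delete with exists() so a missing path is not treated
     * as an error (the file name is illustrative).
     */
    @Test
    public void hdfsRmIfExists() throws IllegalArgumentException, IOException{
        Path p = new Path("/aaa111.txt");
        if (fs.exists(p)) {
            fs.delete(p, true);
        }
        fs.close();
    }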
    
    /**
     * List the files under a given HDFS directory.
     */
    @Test
    public void hdfsLs() throws IllegalArgumentException, IOException{
        // 1. Get a remote iterator; the second argument asks for a recursive listing
        RemoteIterator<LocatedFileStatus> iter = fs.listFiles(new Path("/"), true);
        // 2. Drain the iterator
        while (iter.hasNext()) {
            LocatedFileStatus status = iter.next();
            System.out.println("Path: " + status.getPath());
            System.out.println("Block size: " + status.getBlockSize());
            System.out.println("File length: " + status.getLen());
            System.out.println("Replication: " + status.getReplication());
            System.out.println("Block locations: " + Arrays.toString(status.getBlockLocations()));
            System.out.println("===============================");
        }
        // 3. Release resources
        fs.close();
    }
    
    /**
     * Distinguish files from directories.
     */
    @Test
    public void hdfsFile() throws IllegalArgumentException, IOException{
        // 1. Get the status of every entry under the root
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        // 2. Iterate over the entries
        for (FileStatus ls : listStatus) {
            if (ls.isFile()) {
                // Regular file
                System.out.println("file-----f-----" + ls.getPath().getName());
            } else {
                // Directory
                System.out.println("dir-----d-----" + ls.getPath().getName());
            }
        }
    }
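    
    /**
     * Hedged addition: an @After hook releases the client once per test, which also
     * covers tests (like hdfsFile above) that do not close fs themselves. Closing an
     * already-closed FileSystem is typically a no-op, but if you adopt this hook you
     * can drop the per-test fs.close() calls.
     */
    @org.junit.After
    public void tearDown() throws IOException {
        if (fs != null) {
            fs.close();
        }
    }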
}

3. Reading and writing files in HDFS:


package com.css.hdfs03;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;

/**
 * Reading and writing HDFS files
 */
public class ReadData {
    
    FileSystem fs = null;
    @Before
    public void init() throws IOException, InterruptedException, URISyntaxException {
        // 1. Load the configuration
        Configuration conf = new Configuration();
        // 2. Construct the client
        fs = FileSystem.get(new URI("hdfs://192.168.146.132:9000/"), conf, "root");
    }
    
    /**
     * Read data, approach one: a single read into a byte buffer.
     */
    @Test
    public void testReadData1() throws IllegalArgumentException, IOException{
        // 1. Open an input stream on the HDFS file
        FSDataInputStream in = fs.open(new Path("/a.txt"));
        byte[] buf = new byte[1024];
        // read() may return fewer bytes than the buffer holds; use the returned count
        int len = in.read(buf);
        System.out.println(new String(buf, 0, len));
        // 2. Release resources
        in.close();
        fs.close();
    }
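    
    /**
     * Hedged alternative (assumes you just want the bytes on stdout): Hadoop's
     * IOUtils.copyBytes streams the whole file without manual buffer bookkeeping.
     */
    @Test
    public void testReadDataCopyBytes() throws IllegalArgumentException, IOException{
        FSDataInputStream in = fs.open(new Path("/a.txt"));
        // Args: in, out, buffer size, close the streams when done
        IOUtils.copyBytes(in, System.out, 4096, true);
        fs.close();
    }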
    
    /**
     * Read data, approach two: wrap the stream in a buffered character reader.
     */
    @Test
    public void testReadData2() throws IllegalArgumentException, IOException{
        // 1. Open an input stream on the HDFS file
        FSDataInputStream in = fs.open(new Path("/hdfs-site.xml"));
        // 2. Wrap it in a buffered reader, decoding as UTF-8
        BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
        // 3. Read line by line
        String line = null;
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        // 4. Release resources
        br.close();
        in.close();
        fs.close();
    }
    
    /**
     * Read from a given byte offset in an HDFS file.
     */
    @Test
    public void testRandomRead() throws IllegalArgumentException, IOException{
        // 1. Open an input stream on the HDFS file
        FSDataInputStream in = fs.open(new Path("/hdfs-site.xml"));
        // 2. Seek to the desired offset
        in.seek(14);
        byte[] b = new byte[5];
        // 3. readFully blocks until the buffer is filled (a plain read may return fewer bytes)
        in.readFully(b);
        System.out.println(new String(b));
        // 4. Release resources
        in.close();
        fs.close();
    }
    
    /**
     * Write data to HDFS, approach one: stream a local file up.
     */
    @Test
    public void testWriteData() throws IllegalArgumentException, IOException{
        // 1. Output stream to the HDFS file (overwrite = false: fail if it already exists)
        FSDataOutputStream out = fs.create(new Path("/windows.txt"), false);
        // 2. Input stream from the local file
        FileInputStream in = new FileInputStream("C:\\Users\\Administrator\\Desktop\\1012.txt");
        byte[] buf = new byte[1024];
        int read = 0;
        while ((read = in.read(buf)) != -1) {
            out.write(buf, 0, read);
        }
        // 3. Release resources
        in.close();
        out.close();
        fs.close();
    }
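    
    /**
     * Hedged shortcut (destination name is illustrative): copyFromLocalFile performs
     * the same local-to-HDFS transfer as the stream loop above in a single call.
     */
    @Test
    public void testUploadShortcut() throws IllegalArgumentException, IOException{
        fs.copyFromLocalFile(new Path("C:\\Users\\Administrator\\Desktop\\1012.txt"),
                new Path("/windows2.txt"));
        fs.close();
    }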
    
    /**
     * Write data to HDFS, approach two: write a byte array directly.
     */
    @Test
    public void testWriteData1() throws IllegalArgumentException, IOException{
        // 1. Create the output stream
        FSDataOutputStream out = fs.create(new Path("/love"));
        // 2. Write the data
        out.write("Areyouokmylove".getBytes());
        // 3. Release resources
        IOUtils.closeStream(out);
        fs.close();
    }
}
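
Appending to an existing file is also possible on Hadoop 2.x clusters. A minimal sketch, reusing the fs client from init() and the /love file created above (it assumes the cluster permits appends):

// Sketch: append to an existing HDFS file; the file must already exist.
FSDataOutputStream out = fs.append(new Path("/love"));
out.write(", and more".getBytes());
IOUtils.closeStream(out);
fs.close();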

4. Creating a directory with the Java API:

package com.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import org.junit.Before;
import org.junit.Test;

public class HdfsTest {
    
    private FileSystem fs = null;
    
    @Before
    public void init() throws Exception {
        fs = FileSystem.get(new URI("hdfs://192.168.119.128:9000"),
                new Configuration(),"root");
        
    }
    @Test
    public void testMkdir() throws Exception{
        boolean flag = fs.mkdirs(new Path("/javaApi/mk/dir1/dir2"));
        System.out.println(flag ? "Create succeeded" : "Create failed");
    }
}
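
Note that mkdirs creates every missing parent directory along the path (here /javaApi and /javaApi/mk as well as the leaf), much like mkdir -p in a shell, and returns true on success.

5. A fuller standalone example (datanode info, create/read/list/upload/delete). The class below comes from a separate project and relies on two small helpers, Constant (which holds the cluster URL) and DateUtil (which formats timestamps):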
 
import java.io.IOException;
import java.net.URI;
import java.text.ParseException;
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
 
import com.shidai.hadoop.utils.Constant;
import com.shidai.hadoop.utils.DateUtil;
 
public class HDFSTest {
 
    private static String url = Constant.url;
    private static Configuration conf = new Configuration();
 
    public static void getAllDataNode() {
 
        try {
            // Use the cluster URI explicitly; FileSystem.get(conf) with a bare
            // Configuration would return the local file system and the cast would fail.
            FileSystem fs = FileSystem.get(URI.create(url), conf);
            DistributedFileSystem distributedfs = (DistributedFileSystem) fs;
            DatanodeInfo[] datanodeInfos = distributedfs.getDataNodeStats();
            for (int i = 0; i < datanodeInfos.length; i++) {
                System.out.println("第" + i + "個datanode:" + datanodeInfos[i].getHostName());
            }
 
        } catch (IOException e) {
            e.printStackTrace();
        }
 
    }
 
    /**
     * Create a file and write the given contents into it.
     * 
     * @param dst
     * @param contents
     * @throws IOException
     */
    public static void createFile(String dst, byte[] contents) throws IOException {
 
        FileSystem fs = FileSystem.get(URI.create(url), conf);
        Path path = new Path(dst);
        FSDataOutputStream out = fs.create(path);
        out.write(contents);
        out.close();
        fs.close();
        System.out.println("創建文件成功");
 
    }
 
    /**
     * Read a file and print its upload time.
     * 
     * @param dst
     * @throws JSONException
     * @throws ParseException
     */
    public static void readFile(String dst) throws JSONException, ParseException {
 
        FileSystem fs;
        FSDataInputStream in;
        try {
            fs = FileSystem.get(URI.create(url), conf);
            in = fs.open(new Path(dst));
            byte[] ioBuffer = new byte[1024];
            StringBuffer sf = new StringBuffer();
            int len = -1;
 
            // The loop condition already advances the stream; the original also called
            // read() inside the body, which silently skipped every other buffer.
            while ((len = in.read(ioBuffer)) != -1) {
                sf.append(new String(ioBuffer, 0, len));
            }
            in.close();
            fs.close();
 
            System.out.println(sf.toString());
            JSONObject json = new JSONObject(sf.toString());
            Long time = json.getLong("last_time");
            String sd = DateUtil.getDate(time * 1000);
 
            System.out.println("上傳時間:" + sd);
 
        } catch (IOException e) {
            e.printStackTrace();
        }
 
    }
 
    /**
     * List the files under a directory (non-recursively) and read each one.
     * 
     * @param dst
     */
    public static void listFiles(String dst) {
        FileSystem fs = null;
        try {
            fs = FileSystem.get(URI.create(url), conf);
 
            RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(new Path(dst), false);
            while (iterator.hasNext()) {
 
                LocatedFileStatus locatedFileStatus = iterator.next();
                if (locatedFileStatus.isFile()) {
                    String path = locatedFileStatus.getPath().toString();
                    System.out.println(path);
                    if (!path.endsWith("tmp")) {
                        readFile(path);
                    }
 
                }
            }
 
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
 
    /**
     * Upload a local file to HDFS and list the destination directory.
     * 
     * @param src
     * @param dst
     */
    public static void upload(String src, String dst) {
 
        FileSystem fs = null;
        try {
            fs = FileSystem.get(URI.create(url), conf);
            Path srcPath = new Path(src);
            Path dstPath = new Path(dst);
            fs.copyFromLocalFile(false, srcPath, dstPath);
            // Print the paths now under the destination directory
 
            System.out.println("list files");
            FileStatus[] fileStatus = fs.listStatus(dstPath);
            for (FileStatus fstatus : fileStatus) {
                System.out.println(fstatus.getPath());
            }
 
        } catch (IOException e) {
 
            e.printStackTrace();
 
        } finally {
 
            if (fs != null) {
                try {
                    fs.close();
                } catch (IOException e) {
 
                    e.printStackTrace();
 
                }
 
            }
        }
 
    }
 
    /**
     * Delete a file (non-recursively).
     * 
     * @param dst
     */
    public static void delete(String dst) {
 
        FileSystem fs = null;
        try {
            fs = FileSystem.get(URI.create(url), conf);
            boolean flag = fs.delete(new Path(dst), false);
            if (flag) {
                System.out.println("Delete succeeded");
            } else {
                System.out.println("Delete failed");
            }
            }
        } catch (IOException e) {
            e.printStackTrace();
 
        }
 
    }
 
    public static void main(String[] args) throws JSONException, ParseException {
 
        System.setProperty("hadoop.home.dir", "C:/Users/root/.m2/repository/org/apache/hadoop/hadoop-common/2.5.2");
        byte[] contents = "明月幾時有...\n".getBytes();
        /*
         * try{ // createFile("/user/hadoop/test/hdfs01.txt", contents);
         * }catch(IOException e){ e.printStackTrace(); }
         */
 
        // getAllDataNode();
        // upload("F:/yun/svn/1.txt", "/user/root/");
        // Read a file
        // readFile("/flume/data/FlumeData.1469543705102");
        // List files
        // listFiles("/flume/");
 
        // Delete a file
        delete("/user/root/test");
 
    }
 
}
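
A note on the hadoop.home.dir property set in main: on Windows, Hadoop expects that directory to contain bin/winutils.exe, so pointing it at a Maven repository path as above only works if the winutils binaries have actually been copied there.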