package demo;

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

public class TestMetaData {

    // List the entries under a directory and report whether each one is a file or a directory
    @Test
    public void test1() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://bigdata11:9000");

        // Create the HDFS client
        FileSystem client = FileSystem.get(conf);

        FileStatus[] fsList = client.listStatus(new Path("/folder1"));
        for (FileStatus s : fsList) {
            System.out.println("File or directory? " + (s.isDirectory() ? "directory" : "file"));
            System.out.println(s.getPath().toString());
        }
    }

    // Get the data block information of a file
    @Test
    public void test2() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://bigdata11:9000");

        // Create the HDFS client
        FileSystem client = FileSystem.get(conf);

        // Get the file's status (needed for its length)
        FileStatus fs = client.getFileStatus(new Path("/folder1/a.txt"));
        BlockLocation[] blkLocations = client.getFileBlockLocations(fs, 0, fs.getLen());
        for (BlockLocation b : blkLocations) {
            // Hosts storing this block: an array, because the replicas of one block
            // are kept on different DataNodes for redundancy
            System.out.println(Arrays.toString(b.getHosts()));
            // Names (host:port) of the DataNodes holding this block
            System.out.println(Arrays.toString(b.getNames()));
        }
    }
}
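
Beyond reading metadata, the same FileSystem client can read file contents. Below is a minimal sketch of downloading an HDFS file to the local disk with IOUtils.copyBytes; the NameNode address hdfs://bigdata11:9000 matches the tests above, while the source path /folder1/a.txt and the local target /tmp/a.txt are assumptions chosen for illustration.

package demo;

import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

public class TestDownload {

    // Copy an HDFS file to the local file system (paths are illustrative)
    @Test
    public void testDownload() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://bigdata11:9000");

        // Create the HDFS client
        FileSystem client = FileSystem.get(conf);

        // Open an input stream on the HDFS file (assumed path)
        InputStream in = client.open(new Path("/folder1/a.txt"));
        // Output stream to a local file (assumed target)
        OutputStream out = new FileOutputStream("/tmp/a.txt");

        // Copy with a 1 KB buffer and close both streams when finished
        IOUtils.copyBytes(in, out, 1024, true);
    }
}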


