由於 MapReduce 實驗總要查看 output/part-r-00000 的內容，
所以寫了這個小工具程序，直接在本地讀取並打印 HDFS 上的結果文件。
package Utils;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;

/**
 * Reads and prints the content of a file stored on HDFS
 * (by default {@code output/part-r-00000} under the user's home directory).
 *
 * @company 源辰信息
 * @author navy
 */
public class FindHDFSText {
    /** Logger for reporting HDFS access failures. */
    private static Logger log = Logger.getLogger(FindHDFSText.class);

    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration(); // load Hadoop configuration
            // Resolve datanodes by hostname (needed when the client is outside the cluster network).
            conf.set("dfs.client.use.datanode.hostname", "true");
            URI uri = new URI("hdfs://**:9000/"); // NameNode address — replace ** with the real host

            // try-with-resources: the original leaked the FileSystem handle; it is
            // Closeable and must be released even when reading fails.
            try (FileSystem fs = FileSystem.get(uri, conf, "hadoop")) {
                // Relative path — resolved against /user/<user>/ by default.
                Path p = new Path("output/part-r-00000");
                // Fetch the file status once (the original issued this RPC twice).
                Path resolved = fs.getFileStatus(p).getPath();
                System.out.println("要查看的文件路徑為:" + resolved);

                // Decode through a Reader with an explicit UTF-8 charset. The original
                // converted raw byte chunks via new String(bytes, 0, len), which (a) used
                // the platform default charset and (b) could split a multibyte character
                // across chunk boundaries, garbling non-ASCII output.
                try (BufferedReader reader = new BufferedReader(
                        new InputStreamReader(fs.open(resolved), StandardCharsets.UTF_8))) {
                    char[] buf = new char[8192];
                    int n;
                    while ((n = reader.read(buf)) != -1) {
                        System.out.print(new String(buf, 0, n));
                    }
                }
                System.out.println();
            }
        } catch (Exception e) {
            // Boundary catch: log with full stack trace; nothing to recover here.
            log.error("hdfs操作失敗!!!", e);
        }
    }
}