package com.duking.util;

import java.io.IOException;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSUtil {

    /**
     * Create a file.
     * path: the file path; txt: the file content
     */
    public void CreateFile(String path, String txt) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        byte[] buff = txt.getBytes();
        Path dfs = new Path(path);
        FSDataOutputStream outputStream = hdfs.create(dfs);
        outputStream.write(buff, 0, buff.length);
        outputStream.close();
        hdfs.close();
        System.out.println("Running CreateFile over!!");
    }
    // Upload a local file to HDFS
    public static void uploadFile(String src, String dst) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path srcPath = new Path(src);   // source path (local)
        Path dstPath = new Path(dst);   // destination path (HDFS)
        // Call the file system's copy function; the first argument says whether
        // to delete the source file (true = delete, default is false)
        fs.copyFromLocalFile(false, srcPath, dstPath);
        // Print the file paths
        System.out.println("Upload to " + conf.get("fs.default.name"));
        System.out.println("------------list files------------" + "\n");
        FileStatus[] fileStatus = fs.listStatus(dstPath);
        for (FileStatus file : fileStatus) {
            System.out.println(file.getPath());
        }
        fs.close();
    }
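    // Not part of the original utility: a minimal download counterpart to uploadFile,
    // sketched here for illustration. It assumes FileSystem.copyToLocalFile(Path, Path)
    // is on the classpath (it is part of the public FileSystem API) and that the local
    // destination directory exists and is writable; the method name is hypothetical.
    public static void downloadFile(String src, String dst) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path srcPath = new Path(src);   // path on HDFS
        Path dstPath = new Path(dst);   // path on the local file system
        // Copy from HDFS to the local file system; the HDFS source is kept
        fs.copyToLocalFile(srcPath, dstPath);
        System.out.println("Download from " + conf.get("fs.default.name") + " to " + dst);
        fs.close();
    }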
    /**
     * Create a directory.
     * @param path
     * @throws IOException
     */
    public void CreateDir(String path) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        Path dfs = new Path(path);
        hdfs.mkdirs(dfs);
        hdfs.close();
    }

    /**
     * Rename a file.
     * @param oldpath
     * @param newpath
     * @throws IOException
     */
    public void RenameFile(String oldpath, String newpath) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        Path frpaht = new Path(oldpath);   // old file name
        Path topath = new Path(newpath);   // new file name
        boolean isRename = hdfs.rename(frpaht, topath);
        String result = isRename ? "succeeded" : "failed";
        System.out.println("File rename " + result);
        hdfs.close();
    }

    /**
     * Delete a file or directory.
     * @param path
     * @throws IOException
     */
    public void DeleteFile(String path) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        hdfs.delete(new Path(path), true);
        hdfs.close();
    }

    /**
     * Check whether a file or directory exists.
     * @param path
     * @throws IOException
     */
    public void CheckFile(String path) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        Path findf = new Path(path);
        boolean isExists = hdfs.exists(findf);
        System.out.println("Exist? " + isExists);
        hdfs.close();
    }

    /**
     * Get the last modification time of an HDFS file.
     * @param path
     * @throws IOException
     */
    public void GetLTime(String path) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        Path fpath = new Path(path);
        FileStatus fileStatus = hdfs.getFileStatus(fpath);
        long modiTime = fileStatus.getModificationTime();
        Date date = new Date(modiTime);
        System.out.println(path + " change time is " + date);
        hdfs.close();
    }

    /**
     * Get file information.
     * @param path
     * @throws IOException
     */
    public void GetFileInfo(String path) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        Path fpath = new Path(path);
        FileStatus fileStatus = hdfs.getFileStatus(fpath);
        System.out.println("Path: " + fileStatus.getPath());
        System.out.println("Block size: " + fileStatus.getBlockSize());
        System.out.println("Owner: " + fileStatus.getOwner() + ":" + fileStatus.getGroup());
        System.out.println("Permission: " + fileStatus.getPermission());
        System.out.println("Length: " + fileStatus.getLen());
        System.out.println("Replication: " + fileStatus.getReplication());
        System.out.println("Modification time: " + fileStatus.getModificationTime());
        hdfs.close();
    }

    /**
     * List all files under a directory.
     * @param path
     * @throws IOException
     */
    public void GetAllFile(String path) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        Path fpath = new Path(path);
        FileStatus[] stats = hdfs.listStatus(fpath);
        for (int i = 0; i < stats.length; ++i) {
            System.out.println(stats[i].getPath().toString());
        }
        hdfs.close();
    }

    /**
     * Find where a file's blocks are located in the cluster.
     * @param path
     * @throws IOException
     */
    public void FindFilePost(String path) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        Path fpath = new Path(path);
        FileStatus fileStatus = hdfs.getFileStatus(fpath);
        BlockLocation[] blkLocations = hdfs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        int blockLen = blkLocations.length;
        for (int i = 0; i < blockLen; i++) {
            String[] hosts = blkLocations[i].getHosts();
            System.out.println("block_" + i + "_location: " + hosts[0]);
        }
        hdfs.close();
    }
}
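The utility above writes, uploads, renames, deletes and inspects files, but it never reads a file back from HDFS. A minimal sketch of such a read method is shown below; it relies on FileSystem.open and org.apache.hadoop.io.IOUtils.copyBytes, and the name ReadFile is only illustrative (it is not part of the original HDFSUtil class).

    /**
     * Read a file from HDFS and print its content to standard output.
     * Sketch only: could be added to HDFSUtil alongside the methods above.
     */
    public void ReadFile(String path) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        // requires org.apache.hadoop.fs.FSDataInputStream
        FSDataInputStream in = hdfs.open(new Path(path));
        try {
            // Copy the stream to stdout with a 4 KB buffer; do not let copyBytes close the streams
            org.apache.hadoop.io.IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            in.close();
            hdfs.close();
        }
    }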
package com.duking.test;

import java.io.IOException;

import com.duking.util.HDFSUtil;

public class HDFSTest {

    public static void main(String[] args) throws IOException {
        HDFSUtil Hdfs = new HDFSUtil();
        //Hdfs.CreateFile("/user/duking/testfile2.txt", "hello,world");
        //Hdfs.CreateDir("/user/test");
        //Hdfs.RenameFile("/user/duking/testfile.txt", "/user/duking/test.txt");
        //Hdfs.DeleteFile("/user/duking/test.txt");
        //Hdfs.CheckFile("/user/hadoop/input/protocols");
        //Hdfs.GetLTime("/user/hadoop/input/protocols");
        //Hdfs.GetFileInfo("/user/hadoop/input/protocols");
        //Hdfs.GetAllFile("/user/hadoop/input/");
        Hdfs.FindFilePost("/user/hadoop/input/protocols");
        System.out.println("Running is over!!");
    }
}
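Note that every method above calls FileSystem.get(conf) on an empty Configuration, so the code only reaches HDFS when core-site.xml / hdfs-site.xml (with fs.default.name pointing at the NameNode) are on the classpath; otherwise it operates on the local file system. If those configuration files are not available, one alternative is to pass the NameNode URI explicitly, as in the small sketch below; the address hdfs://localhost:9000 is only a placeholder for your own cluster.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class ExplicitUriExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder NameNode address; replace with your cluster's URI
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:9000"), conf);
        System.out.println("Connected to: " + fs.getUri());
        fs.close();
    }
}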
For more HDFS operations, see the HDFS Java API documentation:
http://hadoop.apache.org/docs/stable/api/index.html
http://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-hdfs/HdfsUserGuide.html