```java
package cn.hx.test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.BasicConfigurator;

import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;

public class test1 {
    public static void main(String[] args) throws IOException {
        BasicConfigurator.configure();
        try {
            // URL of the HDFS NameNode to connect to
            URI uri = new URI("hdfs://192.168.22.131:9000");
            Configuration conf = new Configuration();
            FileSystem fileSystem = FileSystem.get(uri, conf);
            // Each FileStatus describes one file or directory in HDFS
            FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));
            for (FileStatus fileStatus : listStatus) {
                System.out.println(fileStatus);
                System.out.println("Path: " + fileStatus.getPath());
                System.out.println("Is directory: " + fileStatus.isDirectory());
                System.out.println("Modification time: " + fileStatus.getModificationTime());
                System.out.println("Access time: " + fileStatus.getAccessTime());
                System.out.println("Owner: " + fileStatus.getOwner());
                System.out.println("Group: " + fileStatus.getGroup());
                System.out.println("Permissions: " + fileStatus.getPermission());
                // getSymlink() throws if the entry is not a symlink, so guard first
                if (fileStatus.isSymlink()) {
                    System.out.println("Symlink target: " + fileStatus.getSymlink());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Download: read an HDFS file and copy it to stdout
    private static void open(FileSystem fileSystem) throws IOException {
        FSDataInputStream in = fileSystem.open(new Path("/test/1"));
        IOUtils.copyBytes(in, System.out, 1024, true);
    }

    // Upload: copy a local file into HDFS
    private static void put(FileSystem fileSystem) throws IOException {
        FSDataOutputStream out = fileSystem.create(new Path("/test/1"));
        FileInputStream in = new FileInputStream("E:\\BigDataVideos\\Hadoop 7\\2015-12-30 【hadoop】\\edits.xml");
        IOUtils.copyBytes(in, out, 1024, true);
    }
}
```
Result:
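Note that the `open` and `put` helpers are never called from `main` above. A minimal sketch of how they could be wired up, reusing the same `FileSystem` handle (the URI is taken from the listing; the local file path here is just a hypothetical example):

```java
// Sketch only: exercising the upload/download logic from the class above.
URI uri = new URI("hdfs://192.168.22.131:9000");
FileSystem fileSystem = FileSystem.get(uri, new Configuration());

// Upload a local file to /test/1 (local path is hypothetical) ...
try (FSDataOutputStream out = fileSystem.create(new Path("/test/1"));
     FileInputStream in = new FileInputStream("E:\\local\\edits.xml")) {
    IOUtils.copyBytes(in, out, 1024, false);
}
// ... then stream it back from HDFS to stdout.
try (FSDataInputStream in = fileSystem.open(new Path("/test/1"))) {
    IOUtils.copyBytes(in, System.out, 1024, false);
}
```

Using try-with-resources with `close = false` avoids the double-close that the original's `IOUtils.copyBytes(..., true)` would trigger, and keeps `System.out` open for later output.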