Create a new Maven project in IDEA and add the dependencies to the POM. When the IDE prompts, click Import Changes and wait for the dependencies to finish downloading.
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>cn.itcast</groupId>
    <artifactId>example-hdfs</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.7.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.7.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.7.5</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>RELEASE</version>
        </dependency>
    </dependencies>
</project>
After starting Hadoop, test mkdir, put, and get from IDEA: creating a directory, uploading a file, and downloading a file.
package cn.lshm.hdfs;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.FileInputStream;

public class TestHDFSClient {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // use the HDFS file system
        conf.set("fs.defaultFS", "hdfs://master:9000");
        // set the Java client's identity this way
        System.setProperty("HADOOP_USER_NAME", "root");
        FileSystem fs = FileSystem.get(conf);
        // or set the client identity like this:
        // FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");

        // fs.mkdirs(new Path("/helloByJava")); // create a directory

        // download a file to the local machine; if you hit a 0644 error or
        // "winutils.exe not found", the Windows environment and related files must be set up.
        // fs.copyToLocalFile(new Path("/zookeeper.out"), new Path("D:\\test\\examplehdfs"));

        // operate on HDFS through streams -- a lower-level approach
        FSDataOutputStream outputStream = fs.create(new Path("/2.txt"), true);             // output stream to HDFS
        FileInputStream inputStream = new FileInputStream("D:/test/examplehdfs/1.txt");    // input stream from a local file
        IOUtils.copy(inputStream, outputStream);                                           // upload the local file to HDFS
        fs.close();
    }
}
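About the 0644 / winutils.exe errors mentioned in the comment: on Windows, some local-file operations go through Hadoop's native winutils.exe. A common workaround, shown here only as a sketch (the D:\hadoop-2.7.5 path is an assumed example, not part of the lesson), is to point the hadoop.home.dir system property at a Hadoop directory whose bin folder contains winutils.exe before the FileSystem is created:

// Only needed when running the client on Windows.
// Assumption: winutils.exe (and hadoop.dll) have been placed under D:\hadoop-2.7.5\bin.
System.setProperty("hadoop.home.dir", "D:\\hadoop-2.7.5");

Setting the HADOOP_HOME environment variable to the same directory achieves the same effect.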
After Run completes without errors, check on HDFS whether the corresponding result (here, /2.txt) is present.
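Besides the web UI or the hdfs shell, you can verify from code by reading the file back. A minimal sketch (assuming the same hdfs://master:9000 cluster and the /2.txt created above; the class name is hypothetical):

package cn.lshm.hdfs;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

// Hypothetical helper, not part of the original lesson: reads /2.txt back
// and prints it to the console so the upload can be verified.
public class VerifyUpload {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration(), "root");
        FSDataInputStream in = fs.open(new Path("/2.txt")); // input stream from HDFS
        IOUtils.copy(in, System.out);                        // dump the file contents to the console
        in.close();
        fs.close();
    }
}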
Finally, the instructor created a class that wraps these operations so they are easier to call.
package cn.lshm.hdfs;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;

public class HdfsClient {

    FileSystem fs = null;

    @Before
    public void init() throws Exception {
        Configuration conf = new Configuration();
        // conf.set("fs.defaultFS", "hdfs://node-1:9000");
        /**
         * Parameter precedence:
         * 1. values set in client code
         * 2. user-defined config files on the classpath
         * 3. defaults bundled in the jars
         */
        // get an HDFS client
        fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
    }

    /**
     * Upload a file to HDFS
     */
    @Test
    public void testAddFileToHdfs() throws Exception {
        // local path of the file to upload
        Path src = new Path("d:/GameLog.txt");
        // target path on HDFS
        Path dst = new Path("/");
        fs.copyFromLocalFile(src, dst);
        fs.close();
    }

    /**
     * Copy a file from HDFS to the local file system
     */
    @Test
    public void testDownloadFileToLocal() throws IllegalArgumentException, IOException {
        // fs.copyToLocalFile(new Path("/mysql-connector-java-5.1.28.jar"), new Path("d:/"));
        fs.copyToLocalFile(false, new Path("/install.log.syslog"), new Path("e:/"), true);
        fs.close();
    }

    /**
     * Directory operations
     */
    @Test
    public void testMkdirAndDeleteAndRename() throws IllegalArgumentException, IOException {
        // create a directory
        fs.mkdirs(new Path("/a1/b1/c1"));
        // delete a directory; for a non-empty directory the second argument must be true
        // so that all children are deleted as well
        fs.delete(new Path("/aaa"), true);
        // rename a file or directory
        fs.rename(new Path("/a1"), new Path("/a2"));
    }

    /**
     * List directory contents, showing files only
     */
    @Test
    public void testListFiles() throws FileNotFoundException, IllegalArgumentException, IOException {
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus fileStatus = listFiles.next();
            System.out.println(fileStatus.getPath().getName());
            System.out.println(fileStatus.getBlockSize());
            System.out.println(fileStatus.getPermission());
            System.out.println(fileStatus.getLen());
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            for (BlockLocation bl : blockLocations) {
                System.out.println("block-length:" + bl.getLength() + "--" + "block-offset:" + bl.getOffset());
                String[] hosts = bl.getHosts();
                for (String host : hosts) {
                    System.out.println(host);
                }
            }
            System.out.println("--------------separator--------------");
        }
    }

    /**
     * List both files and directories
     */
    @Test
    public void testListAll() throws FileNotFoundException, IllegalArgumentException, IOException {
        // you can right-click a method name and choose Run to try it out.
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        String flag = "";
        for (FileStatus fstatus : listStatus) {
            if (fstatus.isFile()) {
                flag = "f-- ";
            } else {
                flag = "d-- ";
            }
            System.out.println(flag + fstatus.getPath().getName());
            System.out.println(fstatus.getPermission());
        }
    }
}
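The class above covers upload, download, directory operations, and listings; one thing it does not show is stream-based reading. A sketch of an extra test method that could be added to HdfsClient (it reuses the fs field from init(); the /2.txt path is just the example file created earlier, and fully qualified names are used to avoid extra imports):

    /**
     * Stream-based read: open a file on HDFS and copy it to standard output.
     * Sketch only; assumes /2.txt exists (e.g. created by TestHDFSClient above).
     */
    @Test
    public void testReadFileAsStream() throws IOException {
        org.apache.hadoop.fs.FSDataInputStream in = fs.open(new Path("/2.txt"));
        org.apache.commons.io.IOUtils.copy(in, System.out);
        in.close();
        fs.close();
    }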