Click Project > Maven > Next.
(1) Set Maven to import dependency JARs automatically
Check "Import Maven projects automatically", then click Apply.
(2) Configure the pom.xml file
The pom.xml configuration is as follows:
```xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.kaikeba.hadoop</groupId>
    <artifactId>com.kaikeba.hadoop</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>jar</packaging>

    <properties>
        <hadoop.version>2.7.3</hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>commons-cli</groupId>
            <artifactId>commons-cli</artifactId>
            <version>1.2</version>
        </dependency>
        <dependency>
            <groupId>commons-logging</groupId>
            <artifactId>commons-logging</artifactId>
            <version>1.1.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <!-- 3.1.2 -->
        <!-- <dependency> -->
        <!--     <groupId>org.apache.hadoop</groupId> -->
        <!--     <artifactId>hadoop-hdfs-client</artifactId> -->
        <!--     <version>2.8.0</version> -->
        <!-- </dependency> -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-app</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-hs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <!-- <dependency> -->
        <!--     <groupId>org.slf4j</groupId> -->
        <!--     <artifactId>slf4j-api</artifactId> -->
        <!--     <version>1.7.25</version> -->
        <!-- </dependency> -->
        <!-- <dependency> -->
        <!--     <groupId>log4j</groupId> -->
        <!--     <artifactId>log4j</artifactId> -->
        <!--     <version>1.2.17</version> -->
        <!-- </dependency> -->
    </dependencies>
</project>
```
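Before writing any HDFS code, it can be worth confirming that the dependencies above actually resolve. Here is a minimal sanity-check sketch (the `VersionCheck` class name is illustrative, not part of the original project); if it compiles and prints a version, hadoop-common is on the classpath:

```java
package com.kaikeba.hadoop.hdfs;

import org.apache.hadoop.util.VersionInfo;

/**
 * Minimal sanity check: prints the Hadoop version found on the classpath.
 * If this runs, the pom.xml dependencies above resolved correctly.
 */
public class VersionCheck {
    public static void main(String[] args) {
        // VersionInfo ships with hadoop-common
        System.out.println("Hadoop version: " + VersionInfo.getVersion());
    }
}
```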
**Upload a local file to HDFS**

```java
package com.kaikeba.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.*;
import java.net.URI;

/**
 * Writes a file from the local file system to HDFS via the Java API.
 */
public class FileCopyFromLocal {
    public static void main(String[] args) {
        String source = "E:\\aa.mp4";
        // Make sure the /data directory exists on HDFS (adjust for your environment)
        String destination = "hdfs://122.51.241.109:9000/data/hdfs01.mp4";
        InputStream in = null;
        try {
            in = new BufferedInputStream(new FileInputStream(source));
            // Configuration for HDFS reads and writes
            Configuration conf = new Configuration();
            // Create a file system object for the destination URI
            FileSystem fs = FileSystem.get(URI.create(destination), conf);
            // Create an output stream on HDFS
            OutputStream out = fs.create(new Path(destination));
            // Copy the stream; the final 'true' closes both streams when done
            IOUtils.copyBytes(in, out, 4096, true);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
```
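After the upload, the same FileSystem API can confirm that the file landed on HDFS. A minimal sketch, assuming the same destination URI as above (the `FileExistsCheck` class is an illustrative addition, not part of the original example):

```java
package com.kaikeba.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

/**
 * Verifies that an uploaded file exists on HDFS and prints its size.
 */
public class FileExistsCheck {
    public static void main(String[] args) throws IOException {
        String destination = "hdfs://122.51.241.109:9000/data/hdfs01.mp4";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(destination), conf);
        Path path = new Path(destination);
        if (fs.exists(path)) {
            // getFileStatus exposes metadata such as length and modification time
            FileStatus status = fs.getFileStatus(path);
            System.out.println("File exists, size = " + status.getLen() + " bytes");
        } else {
            System.out.println("File not found: " + destination);
        }
    }
}
```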
**Download a file from HDFS to the local file system**

```java
package com.kaikeba.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

/**
 * Reads a file from HDFS and writes it to the local file system.
 *
 * Package and run as a jar:
 *   [bruce@node-01 Desktop]$ hadoop jar com.kaikeba.hadoop-1.0-SNAPSHOT.jar com.kaikeba.hadoop.hdfs.FileReadFromHdfs
 */
public class FileReadFromHdfs {
    public static void main(String[] args) {
        try {
            String srcFile = "hdfs://122.51.241.109:9000/data/hdfs01.mp4";
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(URI.create(srcFile), conf);
            // Open an input stream on the HDFS file
            FSDataInputStream hdfsInStream = fs.open(new Path(srcFile));
            // Local destination for the download
            BufferedOutputStream outputStream = new BufferedOutputStream(new FileOutputStream("/opt/hdfs01.mp4"));
            // Copy the stream; the final 'true' closes both streams when done
            IOUtils.copyBytes(hdfsInStream, outputStream, 4096, true);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
```
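To see what is under /data before or after a transfer, the directory can be listed through the same API. A minimal sketch (the `ListHdfsDir` class name and the use of listStatus are an illustrative addition):

```java
package com.kaikeba.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

/**
 * Lists the entries under an HDFS directory with their sizes.
 */
public class ListHdfsDir {
    public static void main(String[] args) throws IOException {
        String dir = "hdfs://122.51.241.109:9000/data";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dir), conf);
        // listStatus returns one FileStatus per entry in the directory
        for (FileStatus status : fs.listStatus(new Path(dir))) {
            System.out.println(status.getPath() + "\t" + status.getLen() + " bytes");
        }
    }
}
```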
Double-click package in the Maven Lifecycle panel.
This generates com.kaikeba.hadoop-1.0-SNAPSHOT.jar; copy it to the server and run it there.
Run the command: `hadoop jar com.kaikeba.hadoop-1.0-SNAPSHOT.jar com.kaikeba.hadoop.hdfs.FileReadFromHdfs`
Note: com.kaikeba.hadoop.hdfs.FileReadFromHdfs is the fully qualified class name; change it to match your own project.