開始沒搞定插件問題,就弄了個 dfs 操作類,後面搞定了插件問題,這玩意也就聊勝於無了,仍是丟這裏算了。
首先是一個配置文件 ztool.hadoop.properties:
hadoop.home.dir=G:/hadoop/hadoop-2.4.1
hadoop.user.name=hadoop
hadoop.server.ip=192.168.117.128
hadoop.server.hdfs.port=9000
前面兩個屬性後面代碼會有說明的。
屬性文件的讀取,方法多了,通常用 commons-configuration 包,我是本身把這個再整了一次,加了些自動處理,這個代碼中能夠無視,直接把代碼中的那部分改爲普通引用就行了。
logger 部分,用了 logback,也是處理了一下,處理了其在 linux 下會莫名其妙找不到配置文件的問題。這裏就不放出代碼了,直接把代碼中的那部分改爲普通引用就行了,我就不改了。
工具類代碼以下:
package com.cnblogs.zxub.hadoop.dfs;

import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.JobConf;
import org.slf4j.Logger;

import com.cnblogs.zxub.util.logger.ZLoggerFactory;
import com.cnblogs.zxub.util.properties.PropertiesLoader;

/**
 * Thin convenience wrapper over the HDFS {@link FileSystem} API: list, mkdir,
 * delete, create, upload, download and cat. All remote paths are resolved
 * against an optional {@link #setBaseDir(String) baseDir} prefix.
 *
 * <p>Connection settings come from {@code ztool.hadoop.properties}
 * ({@code hadoop.server.ip}, {@code hadoop.server.hdfs.port}, plus
 * {@code hadoop.home.dir} / {@code hadoop.user.name} used below).
 *
 * <p>Not thread-safe: {@code standardPath} mutates {@code baseDir}.
 */
public class DfsUtil {

    private static final Logger logger = ZLoggerFactory.getLogger(DfsUtil.class);

    private final PropertiesConfiguration props =
            PropertiesLoader.getConfiguration("ztool.hadoop");

    private Configuration config = null;
    private String hdfsPath = null;
    private String baseDir = null;

    /**
     * @param hdfs   full HDFS URI such as {@code hdfs://host:port/}; when
     *               {@code null}, built from the ip/port properties
     * @param config Hadoop configuration; when {@code null}, a default
     *               {@link JobConf} named "HdfsDAO" is created
     */
    public DfsUtil(String hdfs, Configuration config) {
        // On Windows, setting HADOOP_HOME is sometimes not enough for Hadoop to
        // locate winutils.exe, so hadoop.home.dir is forced programmatically.
        System.setProperty("hadoop.home.dir",
                this.props.getString("hadoop.home.dir"));
        // Talk to HDFS as the configured user instead of the current OS user,
        // so we need neither a matching local account nor disabled permissions.
        System.setProperty("HADOOP_USER_NAME",
                this.props.getString("hadoop.user.name"));

        this.hdfsPath = (hdfs == null)
                ? "hdfs://" + this.props.getString("hadoop.server.ip") + ":"
                        + this.props.getString("hadoop.server.hdfs.port") + "/"
                : hdfs;

        if (config == null) {
            JobConf conf = new JobConf(DfsUtil.class);
            conf.setJobName("HdfsDAO");
            config = conf;
        }
        this.config = config;
    }

    public DfsUtil(Configuration conf) {
        this(null, conf);
    }

    public DfsUtil() {
        this(null, null);
    }

    public String getBaseDir() {
        return this.baseDir;
    }

    /** Sets the prefix prepended to every remote path; {@code null} means "/". */
    public void setBaseDir(String baseDir) {
        this.baseDir = baseDir;
    }

    public String getHdfsPath() {
        return this.hdfsPath;
    }

    public Configuration getConfig() {
        return this.config;
    }

    /** Opens a FileSystem handle for the configured URI; caller must close it. */
    private FileSystem openFileSystem() throws IOException {
        return FileSystem.get(URI.create(this.getHdfsPath()), this.getConfig());
    }

    /**
     * Normalizes {@code path} to an absolute path under {@code baseDir}:
     * both get a leading "/", trailing "/" is stripped, and the two are
     * concatenated. Side effect: stores the normalized {@code baseDir}.
     */
    private String standardPath(String path) {
        if (this.baseDir == null) {
            this.baseDir = "/";
        }
        if (!this.baseDir.startsWith("/")) {
            this.baseDir = "/" + this.baseDir;
        }
        if (this.baseDir.endsWith("/")) {
            // Strip the trailing slash (turns the bare "/" into "").
            this.baseDir = this.baseDir.replaceFirst("/$", "");
        }
        if (!path.startsWith("/")) {
            path = "/" + path;
        }
        path = this.baseDir + path;
        if (path.endsWith("/")) {
            path = path.replaceFirst("/$", "");
        }
        if (path.isEmpty()) {
            path = "/";
        }
        return path;
    }

    /** Lists a remote folder to stdout (name, directory flag, size). */
    public void ll(String folder) throws IOException {
        folder = this.standardPath(folder);
        Path path = new Path(folder);
        // try-with-resources: the handle is released even if listStatus throws.
        try (FileSystem fs = openFileSystem()) {
            FileStatus[] list = fs.listStatus(path);
            System.out.println("ll: " + folder);
            for (FileStatus f : list) {
                System.out.printf("name: %s, folder: %s, size: %d%n",
                        f.getPath(), f.isDirectory(), f.getLen());
            }
        }
    }

    /** Creates a remote folder; logs a warning if it already exists. */
    public void mkdirs(String folder) throws IOException {
        folder = this.standardPath(folder);
        Path path = new Path(folder);
        try (FileSystem fs = openFileSystem()) {
            if (!fs.exists(path)) {
                fs.mkdirs(path);
                logger.info("create: {}.", folder);
            } else {
                logger.warn("folder [{}] already exists, mkdirs failed.", folder);
            }
        }
    }

    /** Deletes a remote file or directory tree immediately. */
    public void rm(String file) throws IOException {
        file = this.standardPath(file);
        Path path = new Path(file);
        try (FileSystem fs = openFileSystem()) {
            // BUGFIX: deleteOnExit() only removes the path at JVM shutdown;
            // delete(path, true) removes it now, recursively.
            fs.delete(path, true);
            logger.info("delete: {}.", file);
        }
    }

    /** Creates a remote file holding {@code content} encoded as UTF-8. */
    public void newFile(String file, String content) throws IOException {
        file = this.standardPath(file);
        // Explicit charset: the platform default would corrupt non-ASCII text
        // written on one OS and read on another.
        byte[] buff = content.getBytes(StandardCharsets.UTF_8);
        try (FileSystem fs = openFileSystem();
                FSDataOutputStream os = fs.create(new Path(file))) {
            os.write(buff, 0, buff.length);
            logger.info("create: {}.", file);
        }
    }

    /** Uploads a local file to a remote path. */
    public void scp(String local, String remote) throws IOException {
        remote = this.standardPath(remote);
        try (FileSystem fs = openFileSystem()) {
            fs.copyFromLocalFile(new Path(local), new Path(remote));
            logger.info("copy: from [{}] to [{}]", local, remote);
        }
    }

    /** Downloads a remote file to a local path. */
    public void download(String remote, String local) throws IOException {
        remote = this.standardPath(remote);
        Path path = new Path(remote);
        try (FileSystem fs = openFileSystem()) {
            fs.copyToLocalFile(path, new Path(local));
            logger.info("download: from [{}] to [{}]", remote, local);
        }
    }

    /** Prints a remote file's bytes to stdout. */
    public void cat(String remote) throws IOException {
        remote = this.standardPath(remote);
        Path path = new Path(remote);
        System.out.println("cat: " + remote);
        try (FileSystem fs = openFileSystem();
                FSDataInputStream fsdis = fs.open(path)) {
            // false: keep System.out open after copying.
            IOUtils.copyBytes(fsdis, System.out, 4096, false);
        }
    }

    /** Smoke test: lists the HDFS root. */
    public static void main(String[] args) throws IOException {
        DfsUtil hdfs = new DfsUtil();
        hdfs.ll("/");
    }
}