Java API operations on HDFS in Hadoop

package cn.itcast.bigdata.hdfs;

import java.net.URI;
import java.util.Iterator;
import java.util.Map.Entry;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;
/**
 * 
 * When a client operates on HDFS, it does so under a specific user identity.
 * By default, the HDFS client API reads that identity from a JVM argument: -DHADOOP_USER_NAME=hadoop
 * 
 * The identity can also be passed in as a parameter when constructing the client fs object.
 * @author
 *
 */
public class HdfsClientDemo {
    FileSystem fs = null;
    Configuration conf = null;
    @Before
    public void init() throws Exception{
        
        conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:9000");
        
        //obtain a client instance for file system operations
        /*fs = FileSystem.get(conf);*/
        //the URI and the user identity can also be passed in directly
        fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "hadoop"); //the last argument is the user name
    }

    @Test
    public void testUpload() throws Exception {
        
        Thread.sleep(2000);
        fs.copyFromLocalFile(new Path("G:/access.log"), new Path("/access.log.copy"));
        fs.close();
    }
    
    
    @Test
    public void testDownload() throws Exception {
        
        fs.copyToLocalFile(new Path("/access.log.copy"), new Path("d:/"));
        fs.close();
    }
    
    @Test
    public void testConf(){
        Iterator<Entry<String, String>> iterator = conf.iterator();
        while (iterator.hasNext()) {
            Entry<String, String> entry = iterator.next();
            System.out.println(entry.getKey() + "--" + entry.getValue()); //configuration entries loaded by conf
        }
    }
    
    /**
     * Create a directory
     */
    @Test
    public void mkdirTest() throws Exception {
        boolean mkdirs = fs.mkdirs(new Path("/aaa/bbb"));
        System.out.println(mkdirs);
    }
    
    /**
     * Delete
     */
    @Test
    public void deleteTest() throws Exception{
        boolean delete = fs.delete(new Path("/aaa"), true); //true: delete recursively
        System.out.println(delete);
    }
    
    @Test
    public void listTest() throws Exception{
        
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            System.err.println(fileStatus.getPath()+"================="+fileStatus.toString());
        }
        //listFiles recursively finds all the files under the path
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        while(listFiles.hasNext()){
            LocatedFileStatus next = listFiles.next();
            String name = next.getPath().getName();
            Path path = next.getPath();
            System.out.println(name + "---" + path.toString());
        }
    }
    
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:9000");
        //obtain a client instance for file system operations (the user identity comes from the default mechanism, e.g. -DHADOOP_USER_NAME)
        FileSystem fs = FileSystem.get(conf);
        
        fs.copyFromLocalFile(new Path("G:/access.log"), new Path("/access.log.copy"));
        fs.close();
    }
    

}
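
The class Javadoc above notes that, besides passing the user name to FileSystem.get(uri, conf, user), the client identity can come from the JVM argument -DHADOOP_USER_NAME=hadoop. Below is a minimal sketch of that alternative; the class name HdfsUserNameDemo and the sanity-check path are illustrative only, and it assumes the same hdfs://master:9000 cluster with simple authentication.

package cn.itcast.bigdata.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsUserNameDemo {
    public static void main(String[] args) throws Exception {
        //equivalent to starting the JVM with -DHADOOP_USER_NAME=hadoop;
        //must be set before the first FileSystem.get(), because the login user is cached afterwards
        System.setProperty("HADOOP_USER_NAME", "hadoop");

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:9000");

        FileSystem fs = FileSystem.get(conf);
        System.out.println(fs.exists(new Path("/"))); //simple sanity check against the root directory
        fs.close();
    }
}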