Hadoop Series HDFS: The HDFS Java API (Java API Usage)

The HDFS Java API

Java API Usage

Uploading a File

First create a file on the local machine (the client). For example, create a file named word2.txt on drive D and put some arbitrary content in it:

somewhere 
player
Hadoop
you belong to me

Next, write the Java program in IDEA.
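The examples below assume the Hadoop client library is on the classpath. A minimal Maven dependency sketch (the version shown is an assumption; match it to your cluster's Hadoop release):

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <!-- assumed version; use your cluster's Hadoop version -->
    <version>2.7.7</version>
</dependency>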

 

package com.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

public class PutFile {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Local file path
        String local = "D:\\word2.txt";
        // Destination path on HDFS
        String dest = "hdfs://192.168.55.128:9000/input/word2.txt";
        Configuration cfg = new Configuration();
        // Connect as the "root" user to avoid permission errors (see the note below)
        FileSystem fs = FileSystem.get(URI.create(dest), cfg, "root");
        fs.copyFromLocalFile(new Path(local), new Path(dest));
        fs.close();
    }
}

To repeat the earlier point: the destination String dest = "hdfs://192.168.55.128:9000/input/word2.txt" must correspond to the fs.defaultFS setting in core-site.xml, whose value is hdfs://node1:9000. Because the hosts file on the local Windows machine has no entry for node1, the IP address has to be used here instead.
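For reference, the corresponding entry in the cluster's core-site.xml would look roughly like this (a sketch based on the fs.defaultFS value quoted above):

<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://node1:9000</value>
    </property>
</configuration>

Alternatively, adding the line 192.168.55.128 node1 to C:\Windows\System32\drivers\etc\hosts would let the client use hdfs://node1:9000 directly.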


Execution result:

[root@node1 ~]# hdfs dfs -ls /input
Found 2 items
-rw-r--r--   3 root supergroup         52 2018-11-26 07:27 /input/word2.txt
-rw-r--r--   3 root supergroup         42 2018-11-26 04:10 /input/words.txt
[root@node1 ~]#

 

The statement FileSystem fs = FileSystem.get(URI.create(dest), cfg, "root"); specifies the root user explicitly because the default user on Windows is Administrator. If the program does not specify root as the user name, it may throw an exception such as: Permission denied: user=Administrator.
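As an alternative to passing the user name into FileSystem.get, Hadoop also reads the HADOOP_USER_NAME environment variable (or system property) when determining the client user. A minimal sketch, reusing dest and cfg from the program above:

        // Set before the first FileSystem.get call so UserGroupInformation picks it up
        System.setProperty("HADOOP_USER_NAME", "root");
        FileSystem fs = FileSystem.get(URI.create(dest), cfg);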

Downloading a File

package com.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

public class GetFile {
    public static void main(String[] args) throws IOException {
        // Source path on HDFS
        String hdfsPath = "hdfs://192.168.55.128:9000/input/words.txt";
        // Local destination path
        String localPath = "D:/copy_words.txt";
        Configuration cfg = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), cfg);
        Path hdfs_path = new Path(hdfsPath);
        Path local_path = new Path(localPath);
        // delSrc=false keeps the HDFS source; useRawLocalFileSystem=true avoids writing a local .crc checksum file
        fs.copyToLocalFile(false, hdfs_path, local_path, true);
        fs.close();
    }
}

 

Creating an HDFS Directory

package com.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

public class CreateDir {
    public static void main(String[] args) throws IOException, InterruptedException {
        String url = "hdfs://192.168.55.128:9000/tmp/";
        Configuration cfg = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(url), cfg, "root");
        // mkdirs creates the directory, including any missing parents, and returns true on success
        boolean b = fs.mkdirs(new Path(url));
        System.out.println(b);
        fs.close();
    }
}
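mkdirs behaves like hdfs dfs -mkdir -p: it creates any missing parent directories along the path. To first check whether the directory is already present, a minimal sketch (reusing url and fs from the program above):

        Path dir = new Path(url);
        // fs.exists checks whether the path already exists on HDFS
        if (!fs.exists(dir)) {
            System.out.println(fs.mkdirs(dir));
        }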

 

Deleting an HDFS File or Directory

First upload a file to the /tmp directory on HDFS:

[root@node1 ~]# hdfs dfs -put /root/words.txt /tmp
[root@node1 ~]# hdfs dfs -ls /tmp
Found 1 items
-rw-r--r--   3 root supergroup         42 2018-11-26 08:08 /tmp/words.txt

 

package com.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

public class DeleteFile {
    public static void main(String[] args) throws IOException, InterruptedException {
        String url = "hdfs://192.168.55.128:9000/tmp/";
        Configuration cfg = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(url), cfg, "root");
        // The true argument deletes the directory recursively, including the files inside it
        boolean b = fs.delete(new Path(url), true);
        System.out.println(b);
        fs.close();
    }
}
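To delete a single file rather than a whole directory tree, pass false for the recursive flag; deleting a non-empty directory with recursive set to false throws an exception. A minimal sketch, reusing fs from the program above and targeting the /tmp/words.txt file uploaded earlier:

        // recursive=false is sufficient for a single file
        boolean ok = fs.delete(new Path("/tmp/words.txt"), false);
        System.out.println(ok);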

 

Listing the HDFS root from the command line confirms that the /tmp directory has been deleted:

[root@node1 ~]# hdfs dfs -ls /
Found 2 items
drwxr-xr-x   - root supergroup          0 2018-11-26 07:27 /input
drwxr-xr-x   - root supergroup          0 2018-11-26 04:20 /user
[root@node1 ~]#

 

Downloading an HDFS Directory

package com.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

public class CopyToLocalFile {
    public static void main(String[] args) throws IOException {
        String hdfsPath = "hdfs://192.168.55.128:9000/input";
        String localPath = "D:\\input";
        Configuration cfg = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), cfg);
        Path hdfs_path = new Path(hdfsPath);
        Path local_path = new Path(localPath);
        // Copies the whole /input directory to D:\input; useRawLocalFileSystem=true avoids local .crc files
        fs.copyToLocalFile(false, hdfs_path, local_path, true);
        fs.close();
    }
}

 

The input directory and its files can now be found on drive D.

Uploading a Local Directory (Folder)

First prepare a local directory to upload; here, the input directory downloaded in the previous step is renamed to words.

package com.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

public class CopyFromLocalFile {
    public static void main(String[] args) throws IOException, InterruptedException {
        String hdfsPath = "hdfs://192.168.55.128:9000/user/";
        String localPath = "D:\\words";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf, "root");
        Path hdfs_path = new Path(hdfsPath);
        Path local_path = new Path(localPath);
        // Uploads the local words directory recursively, creating /user/words on HDFS
        fs.copyFromLocalFile(local_path, hdfs_path);
        fs.close();
    }
}

 

Verify the result on the cluster; copyFromLocalFile copies the directory recursively, so the local words directory now appears as /user/words:

[root@node1 ~]# hdfs dfs -ls /user/words
Found 2 items
-rw-r--r--   3 root supergroup         52  10:01 /user/words/word2.txt
-rw-r--r--   3 root supergroup         42  10:01 /user/words/words.txt
[root@node1 ~]#

