用java運行Hadoop程序報錯:org.apache.hadoop.fs.LocalFileSystem cannot be cast to org.apache.hadoop.hdfs.DistributedFileSystem

用java運行Hadoop例程報錯:org.apache.hadoop.fs.LocalFileSystem cannot be cast to org.apache.hadoop.hdfs.DistributedFileSystem。所寫代碼以下:

package com.pcitc.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

/**
 * Retrieves the hostnames of all DataNodes in an HDFS cluster.
 *
 * <p>NOTE(review): this is the FAILING version discussed in the article.
 * {@code FileSystem.get(conf)} returns a {@code LocalFileSystem} here, so the
 * cast to {@code DistributedFileSystem} below throws a
 * {@code ClassCastException} (see the stack trace quoted after this listing).
 *
 * @author lenovo
 */
public class GetList {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Setting the (deprecated) "dfs.default.name" key on the conf is not
        // enough in this setup: FileSystem.get(conf) still resolves to the
        // default LocalFileSystem instead of an HDFS-backed filesystem.
        conf.set("dfs.default.name", "hdfs://hadoopmaster:9000");
        FileSystem fs = FileSystem.get(conf);
        // Throws ClassCastException: fs is a LocalFileSystem, not a
        // DistributedFileSystem.
        DistributedFileSystem hdfs = (DistributedFileSystem) fs;
        DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
        String[] names = new String[dataNodeStats.length];
        for (int i = 0; i < dataNodeStats.length; i++) {
            names[i] = dataNodeStats[i].getHostName();
            System.out.println("node" + i + "name" + names[i]);
        }
    }
}

執行以後報以下錯誤:

Exception in thread "main" java.lang.ClassCastException: org.apache.hadoop.fs.LocalFileSystem cannot be cast to org.apache.hadoop.hdfs.DistributedFileSystem
    at org.apache.hadoop.examples.FindFileOnHDFS.getHDFSNodes(FindFileOnHDFS.java:43)
    at org.apache.hadoop.examples.FindFileOnHDFS.main(FindFileOnHDFS.java:16)

緣由是DistributedFileSystem和LocalFileSystem都是FileSystem的子類,FileSystem.get(conf)獲得的是LocalFileSystem的instance,這個類型是默認的。要得到DistributedFileSystem,須要配置conf對象;按照個人寫法我以爲應該是配了conf對象了,可是仍是報錯,最後按照網上的說法進行相應修改就能夠了。直接上修改後的代碼以下(注意紅色部分):

package com.pcitc.hadoop;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

/**
 * Retrieves the hostnames of all DataNodes in an HDFS cluster.
 *
 * <p>Fixed version: the HDFS URI is passed explicitly to
 * {@code FileSystem.get(URI, Configuration)}, so the returned filesystem is
 * resolved from the {@code hdfs://} scheme (a {@code DistributedFileSystem})
 * rather than the default {@code LocalFileSystem}, which made the earlier
 * version fail with a {@code ClassCastException}.
 *
 * @author lenovo
 */
public class GetList {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        String uri = "hdfs://hadoopmaster:9000";
        // Passing the URI directly selects the filesystem implementation for
        // the hdfs:// scheme, regardless of the configured default filesystem.
        // FileSystem is Closeable, so try-with-resources releases its
        // connection resources when we are done.
        try (FileSystem fs = FileSystem.get(URI.create(uri), conf)) {
            DistributedFileSystem hdfs = (DistributedFileSystem) fs;
            DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
            String[] names = new String[dataNodeStats.length];
            for (int i = 0; i < dataNodeStats.length; i++) {
                names[i] = dataNodeStats[i].getHostName();
                System.out.println("node:" + i + ",name:" + names[i]);
            }
        }
    }
}
相關文章
相關標籤/搜索