Java Patterns in the Hadoop Source Code: the Builder Pattern

1. Concept

The Builder pattern encapsulates a complex construction process inside the builder itself. From the caller's point of view, only the builder and the inputs it needs are visible; how the finished product is assembled internally is of no concern to the caller.
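To make the idea concrete, here is a minimal, self-contained sketch of the pattern. The Report class and its fields are hypothetical, invented purely for illustration:

// Minimal Builder sketch: the caller chains setters and calls build();
// how the product is assembled stays hidden inside the builder.
public class Report {
  private final String title;
  private final String body;

  private Report(Builder b) {
    this.title = b.title;
    this.body = b.body;
  }

  public static class Builder {
    private String title = "";
    private String body = "";

    public Builder title(String title) { this.title = title; return this; }
    public Builder body(String body) { this.body = body; return this; }

    public Report build() { return new Report(this); }
  }
}

// Usage: Report r = new Report.Builder().title("Monthly").body("...").build();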

The map-reduce client code below uses the Builder pattern to construct the job configuration.

package Temperature.sort;
// == JobBuilder
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

public class JobBuilder {
 
  private final Class<?> driverClass;
  private final JobConf conf;
  private final int extraArgCount;
  private final String extrArgsUsage;
 
  private String[] extraArgs;
 
  public JobBuilder(Class<?> driverClass) {
    this(driverClass, 0, "");
  }
 
  public JobBuilder(Class<?> driverClass, int extraArgCount, String extrArgsUsage) {
    this.driverClass = driverClass;
    this.extraArgCount = extraArgCount;
    this.conf = new JobConf(driverClass);
    this.extrArgsUsage = extrArgsUsage;
  }

  // vv JobBuilder
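  // Convenience entry point: builds a JobConf directly from a Tool's
  // configuration and the two positional arguments <input> and <output>;
  // prints usage and returns null if the argument count is wrong.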
  public static JobConf parseInputAndOutput(Tool tool, Configuration conf,
      String[] args) {
   
    if (args.length != 2) {
      printUsage(tool, "<input> <output>");
      return null;
    }
    JobConf jobConf = new JobConf(conf, tool.getClass());
    FileInputFormat.addInputPath(jobConf, new Path(args[0]));
    FileOutputFormat.setOutputPath(jobConf, new Path(args[1]));
    return jobConf;
  }

  public static void printUsage(Tool tool, String extraArgsUsage) {
    System.err.printf("Usage: %s [genericOptions] %s\n\n",
        tool.getClass().getSimpleName(), extraArgsUsage);
    GenericOptionsParser.printGenericCommandUsage(System.err);
  }
  // ^^ JobBuilder
 
  public JobBuilder withCommandLineArgs(String... args) throws IOException {
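    // Let GenericOptionsParser consume standard Hadoop options first, then
    // treat what remains as [-overwrite] <input> <output> plus any
    // driver-specific extra arguments.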
    GenericOptionsParser parser = new GenericOptionsParser(conf, args);
    String[] otherArgs = parser.getRemainingArgs();
    // reject argument counts outside [2, 3 + extraArgCount]
    if (otherArgs.length < 2 || otherArgs.length > 3 + extraArgCount) {
      System.err.printf("Usage: %s [genericOptions] [-overwrite] <input path> <output path> %s\n\n",
          driverClass.getSimpleName(), extrArgsUsage);
      GenericOptionsParser.printGenericCommandUsage(System.err);
      System.exit(-1);
    }
    int index = 0;
    boolean overwrite = false;
    if (otherArgs[index].equals("-overwrite")) {
      overwrite = true;
      index++;
    }
    Path input = new Path(otherArgs[index++]);
    Path output = new Path(otherArgs[index++]);
   
    if (index < otherArgs.length) {
      extraArgs = new String[otherArgs.length - index];
      System.arraycopy(otherArgs, index, extraArgs, 0, otherArgs.length - index);
    }
   
    if (overwrite) {
      output.getFileSystem(conf).delete(output, true);
    }
   
    FileInputFormat.addInputPath(conf, input);
    FileOutputFormat.setOutputPath(conf, output);
    return this;
  }
 
  public JobConf build() {
    return conf;
  }
 
  public String[] getExtraArgs() {
    return extraArgs;
  }
}

Invocation:
JobConf conf = JobBuilder.parseInputAndOutput(this, getConf(), args);

JobBuilder.printUsage(this, "<ncdc input> <station input> <output>");
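For context, here is a hedged sketch of a complete driver that consumes parseInputAndOutput; the MaxTemperatureDriver name and the Writable output types are illustrative assumptions, not part of the original post:

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MaxTemperatureDriver extends Configured implements Tool {

  @Override
  public int run(String[] args) throws Exception {
    JobConf conf = JobBuilder.parseInputAndOutput(this, getConf(), args);
    if (conf == null) {
      return -1; // wrong argument count; usage was already printed
    }
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);
    // conf.setMapperClass(...); conf.setReducerClass(...); // job-specific wiring
    JobClient.runJob(conf);
    return 0;
  }

  public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(new MaxTemperatureDriver(), args));
  }
}

The fluent path works the same way: new JobBuilder(MaxTemperatureDriver.class).withCommandLineArgs(args).build() returns the configured JobConf, and getExtraArgs() exposes any trailing driver-specific arguments.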
