chenqb 发表于 2018-10-31 10:24:02

hadoop1.x作业提交过程分析(源码分析第二篇)

/** Run a Hadoop job jar. If the main class is not in the jar's manifest,
   * then it must be provided on the command line. */
  
public static void main(String[] args) throws Throwable {
  
    String usage = "RunJar jarFile args...";
  
    if (args.length < 1) {
  
      System.err.println(usage);
  
      System.exit(-1);
  
    }
  
    int firstArg = 0;
  
    String fileName = args;
  
    File file = new File(fileName);
  
    String mainClassName = null;
  
    JarFile jarFile;
  
    try {
  
      jarFile = new JarFile(fileName);
  
    } catch(IOException io) {
  
      throw new IOException("Error opening job jar: " + fileName)
  
      .initCause(io);
  
    }
  
    Manifest manifest = jarFile.getManifest();
  
    if (manifest != null) {
  
      mainClassName = manifest.getMainAttributes().getValue("Main-Class");
  
    }
  
    jarFile.close();
  
    if (mainClassName == null) {
  
      if (args.length < 2) {
  
      System.err.println(usage);
  
      System.exit(-1);
  
      }
  
      mainClassName = args;
  
    }
  
      //进行相应的一些目录处理工作
  
    mainClassName = mainClassName.replaceAll("/", ".");
  
    File tmpDir = new File(new Configuration().get("hadoop.tmp.dir"));
  
    tmpDir.mkdirs();
  
    if (!tmpDir.isDirectory()) {
  
      System.err.println("Mkdirs failed to create " + tmpDir);
  
      System.exit(-1);
  
    }
  
    final File workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
  
    workDir.delete();
  
    workDir.mkdirs();
  
    if (!workDir.isDirectory()) {
  
      System.err.println("Mkdirs failed to create " + workDir);
  
      System.exit(-1);
  
    }
  
    Runtime.getRuntime().addShutdownHook(new Thread() {
  
      public void run() {
  
          try {
  
            FileUtil.fullyDelete(workDir);
  
          } catch (IOException e) {
  
          }
  
      }
  
      });
  
    unJar(file, workDir);//解压jar包
  

  
    ArrayList classPath = new ArrayList();
  
    classPath.add(new File(workDir+"/").toURL());
  
    classPath.add(file.toURL());
  
    classPath.add(new File(workDir, "classes/").toURL());
  
    File[] libs = new File(workDir, "lib").listFiles();
  
    if (libs != null) {
  
      for (int i = 0; i < libs.length; i++) {
  
      classPath.add(libs.toURL());
  
      }
  
    }
  

  
    ClassLoader loader =
  
      new URLClassLoader(classPath.toArray(new URL));
  
    Thread.currentThread().setContextClassLoader(loader);
  
    Class mainClass = Class.forName(mainClassName, true, loader);
  
    Method main = mainClass.getMethod("main", new Class[] {
  
      Array.newInstance(String.class, 0).getClass()
  
    });
  
    String[] newArgs = Arrays.asList(args)
  
      .subList(firstArg, args.length).toArray(new String);
  
    try {
  
      main.invoke(null, new Object[] { newArgs });
  
    } catch (InvocationTargetException e) {
  
      throw e.getTargetException();
  
    }
  
}


页: [1]
查看完整版本: hadoop1.x作业提交过程分析(源码分析第二篇)