Java class org.apache.hadoop.util.ClassUtil example source code
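All of the snippets below center on ClassUtil.findContainingJar(Class), which returns the local filesystem path of the jar a class was loaded from, or null when the class did not come from a jar. As a minimal sketch of calling it directly (the choice of FileSystem.class here is only illustrative):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.util.ClassUtil;

public class FindContainingJarExample {
  public static void main(String[] args) {
    // Returns the path of the jar that FileSystem was loaded from,
    // or null if the class came from a directory on the classpath.
    String jar = ClassUtil.findContainingJar(FileSystem.class);
    System.out.println("FileSystem was loaded from: " + jar);
  }
}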

Project: hadoop-oss    File: FileSystem.java
private static void loadFileSystems() {
  synchronized (FileSystem.class) {
    if (!FILE_SYSTEMS_LOADED) {
      ServiceLoader<FileSystem> serviceLoader = ServiceLoader.load(FileSystem.class);
      Iterator<FileSystem> it = serviceLoader.iterator();
      while (it.hasNext()) {
        FileSystem fs = null;
        try {
          fs = it.next();
          try {
            SERVICE_FILE_SYSTEMS.put(fs.getScheme(), fs.getClass());
          } catch (Exception e) {
            LOG.warn("Cannot load: " + fs + " from " +
                ClassUtil.findContainingJar(fs.getClass()), e);
          }
        } catch (ServiceConfigurationError ee) {
          LOG.warn("Cannot load filesystem", ee);
        }
      }
      FILE_SYSTEMS_LOADED = true;
    }
  }
}
Project: hadoop    File: TestMRCJCJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: aliyun-oss-hadoop-fs    File: TestMRCJCJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: aliyun-oss-hadoop-fs    File: FileSystem.java
private static void loadFileSystems() {
  synchronized (FileSystem.class) {
    if (!FILE_SYSTEMS_LOADED) {
      ServiceLoader<FileSystem> serviceLoader = ServiceLoader.load(FileSystem.class);
      Iterator<FileSystem> it = serviceLoader.iterator();
      while (it.hasNext()) {
        FileSystem fs = null;
        try {
          fs = it.next();
          try {
            SERVICE_FILE_SYSTEMS.put(fs.getScheme(), fs.getClass());
          } catch (Exception e) {
            LOG.warn("Cannot load: " + fs + " from " +
                ClassUtil.findContainingJar(fs.getClass()), e);
          }
        } catch (ServiceConfigurationError ee) {
          LOG.warn("Cannot load filesystem", ee);
        }
      }
      FILE_SYSTEMS_LOADED = true;
    }
  }
}
Project: big-c    File: TestMRCJCJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestMRCJCJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: hadoop-plus    File: TestJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: FlexMap    File: TestMRCJCJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: hops    File: TestMRCJCJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: hops    File: FileSystem.java
private static void loadFileSystems() {
  synchronized (FileSystem.class) {
    if (!FILE_SYSTEMS_LOADED) {
      ServiceLoader<FileSystem> serviceLoader = ServiceLoader.load(FileSystem.class);
      Iterator<FileSystem> it = serviceLoader.iterator();
      while (it.hasNext()) {
        FileSystem fs = null;
        try {
          fs = it.next();
          try {
            SERVICE_FILE_SYSTEMS.put(fs.getScheme(), fs.getClass());
          } catch (Exception e) {
            LOG.warn("Cannot load: " + fs + " from " +
                ClassUtil.findContainingJar(fs.getClass()), e);
          }
        } catch (ServiceConfigurationError ee) {
          LOG.warn("Cannot load filesystem", ee);
        }
      }
      FILE_SYSTEMS_LOADED = true;
    }
  }
}
Project: incubator-gobblin    File: EmbeddedGobblinDistcp.java
public EmbeddedGobblinDistcp(Path from, Path to) throws JobTemplate.TemplateException, IOException {
  super("Distcp");
  try {
    setTemplate(ResourceBasedJobTemplate.forResourcePath("templates/distcp.template"));
  } catch (URISyntaxException | SpecNotFoundException exc) {
    throw new RuntimeException("Could not instantiate an " + EmbeddedGobblinDistcp.class.getName(), exc);
  }
  this.setConfiguration("from", from.toString());
  this.setConfiguration("to", to.toString());
  // Infer source and target fs uris from the input paths
  this.setConfiguration(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, from.getFileSystem(new Configuration()).getUri().toString());
  this.setConfiguration(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, to.getFileSystem(new Configuration()).getUri().toString());

  // add gobblin-data-management jar to distributed jars
  this.distributeJar(ClassUtil.findContainingJar(CopySource.class));
}
Project: hadoop-TCP    File: TestJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: hardfs    File: TestJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: hadoop-on-lustre2    File: TestMRCJCJobConf.java
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());

  URL urls[] = new URL[] {
    jar.toURI().toURL()
  };

  ClassLoader cl = new URLClassLoader(urls);
  Class clazz = Class.forName(CLASSNAME, true, cl);
  assertNotNull(clazz);

  String containingJar = ClassUtil.findContainingJar(clazz);
  assertEquals(jar.getAbsolutePath(), containingJar);
}
Project: mrgeo    File: HadoopUtils.java
public static void setJar(Job job, Class clazz) throws IOException
{
  Configuration conf = job.getConfiguration();

  if (isLocal(conf))
  {
    String jar = ClassUtil.findContainingJar(clazz);

    if (jar != null)
    {
      conf.set("mapreduce.job.jar", jar);
    }
  }
  else
  {
    DependencyLoader.addDependencies(job, clazz);
    DataProviderFactory.addDependencies(conf);
  }
}
Project: mrgeo    File: HadoopUtils.java
public static String getJar(Configuration conf, Class clazz) throws IOException
{

  if (isLocal(conf))
  {
    String jar = ClassUtil.findContainingJar(clazz);

    if (jar != null)
    {
      conf.set("mapreduce.job.jar", jar);
    }
  }

  return DependencyLoader.getMasterJar(clazz);
  //setJar(job.getConfiguration());
}
Project: hadoop    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
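For context, a MapReduce driver usually passes its own class to setJarByClass so the framework can locate and distribute the right jar; a minimal sketch, with MyDriver as a hypothetical driver class:

import org.apache.hadoop.mapred.JobConf;

public class MyDriver {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Internally resolves ClassUtil.findContainingJar(MyDriver.class)
    // and, if a jar is found, registers it as the job jar.
    conf.setJarByClass(MyDriver.class);
  }
}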
Project: aliyun-oss-hadoop-fs    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
Project: big-c    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
Project: hadoop-2.6.0-cdh5.4.3    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
Project: octopus    File: VersionInfo.java
public static void main(String[] args) {
    System.out.println("Octopus " + getVersion());
    System.out.println("SCM: " + getUrl() + ", revision: " + getRevision());
    System.out.println("Compiled by " + getUser() + " on " + getDate());
    System.out.println("From source with checksum " + getSrcChecksum());
    System.out.println("This command was run using " + ClassUtil.findContainingJar(VersionInfo.class));
}
Project: hadoop-plus    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
Project: FlexMap    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
Project: hops    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
Project: tajo    File: VersionInfo.java
public static void main(String[] args) {
  LOG.debug("version: "+ getVersion());
  System.out.println("Tajo " + getVersion());
  System.out.println("Git " + getUrl() + " -r " + getRevision());
  System.out.println("Compiled by " + getUser() + " on " + getDate());
  System.out.println("Compiled with protoc " + getProtocVersion());
  System.out.println("From source with checksum " + getSrcChecksum());
  System.out.println("This command was run using " + ClassUtil.findContainingJar(VersionInfo.class));
}
Project: tajo-cdh    File: VersionInfo.java
public static void main(String[] args) {
  LOG.debug("version: "+ getVersion());
  System.out.println("Tajo " + getVersion());
  System.out.println("Git " + getUrl() + " -r " + getRevision());
  System.out.println("Compiled by " + getUser() + " on " + getDate());
  System.out.println("Compiled with protoc " + getProtocVersion());
  System.out.println("From source with checksum " + getSrcChecksum());
  System.out.println("This command was run using " + ClassUtil.findContainingJar(VersionInfo.class));
}
Project: hadoop-TCP    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
Project: hardfs    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
Project: hadoop-on-lustre2    File: JobConf.java
/**
 * Set the job's jar file by finding an example class location.
 * 
 * @param cls the example class.
 */
public void setJarByClass(Class cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }   
}
Project: tez    File: VersionInfo.java
public static void main(String[] args) {
  if (args.length != 1) {
    System.err.println("Invalid no. of args. Usage: VersionInfo <component-name>");
    System.exit(-1);
  }

  VersionInfo versionInfo = new VersionInfo(args[0]);
  System.out.println("VersionInfo: " + versionInfo.toString());
  System.out.println("This command was run using " +
      ClassUtil.findContainingJar(VersionInfo.class));
}
Project: TensorFlowOnYARN    File: LaunchCluster.java
public LaunchCluster(Configuration conf, YarnClient yarnClient, CommandLine cliParser) {
  this.conf = conf;
  this.yarnClient = yarnClient;
  appName = cliParser.getOptionValue(
      Constants.OPT_TF_APP_NAME, Constants.DEFAULT_APP_NAME);

  amMemory = Integer.parseInt(cliParser.getOptionValue(
      Constants.OPT_TF_APP_MASTER_MEMORY, Constants.DEFAULT_APP_MASTER_MEMORY));
  amVCores = Integer.parseInt(cliParser.getOptionValue(
      Constants.OPT_TF_APP_MASTER_VCORES, Constants.DEFAULT_APP_MASTER_VCORES));
  amQueue = cliParser.getOptionValue(
      Constants.OPT_TF_APP_MASTER_QUEUE, Constants.DEFAULT_APP_MASTER_QUEUE);
  containerMemory = Integer.parseInt(cliParser.getOptionValue(
      Constants.OPT_TF_CONTAINER_MEMORY, Constants.DEFAULT_CONTAINER_MEMORY));
  containerVCores = Integer.parseInt(cliParser.getOptionValue(
      Constants.OPT_TF_CONTAINER_VCORES, Constants.DEFAULT_CONTAINER_VCORES));

  if (cliParser.hasOption(Constants.OPT_TF_JAR)) {
    tfJar = cliParser.getOptionValue(Constants.OPT_TF_JAR);
  } else {
    tfJar = ClassUtil.findContainingJar(getClass());
  }

  if (cliParser.hasOption(Constants.OPT_TF_LIB)) {
    tfLib = cliParser.getOptionValue(Constants.OPT_TF_LIB);
  } else {
    tfLib = Utils.getParentDir(tfJar) + File.separator + Constants.TF_LIB_NAME;
  }

  workerNum = Integer.parseInt(
      cliParser.getOptionValue(Constants.OPT_TF_WORKER_NUM, Constants.DEFAULT_TF_WORKER_NUM));

  if (workerNum <= 0) {
    throw new IllegalArgumentException(
        "Illegal number of TensorFlow worker task specified: " + workerNum);
  }

  psNum = Integer.parseInt(
      cliParser.getOptionValue(Constants.OPT_TF_PS_NUM, Constants.DEFAULT_TF_PS_NUM));

  if (psNum < 0) {
    throw new IllegalArgumentException(
        "Illegal number of TensorFlow ps task specified: " + psNum);
  }
}
Project: hadoop    File: TestMRJobs.java
private void testSleepJobInternal(boolean useRemoteJar) throws Exception {
  LOG.info("\n\n\nStarting testSleepJob: useRemoteJar=" + useRemoteJar);

  if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
    LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
             + " not found. Not running test.");
    return;
  }

  Configuration sleepConf = new Configuration(mrCluster.getConfig());
  // set master address to local to test that local mode applied iff framework == local
  sleepConf.set(MRConfig.MASTER_ADDRESS, "local");  

  SleepJob sleepJob = new SleepJob();
  sleepJob.setConf(sleepConf);

  // job with 3 maps (10s) and numReduces reduces (5s), 1 "record" each:
  Job job = sleepJob.createJob(3, numSleepReducers, 10000, 1, 5000, 1);

  job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
  if (useRemoteJar) {
    final Path localJar = new Path(
        ClassUtil.findContainingJar(SleepJob.class));
    ConfigUtil.addLink(job.getConfiguration(), "/jobjars",
        localFs.makeQualified(localJar.getParent()).toUri());
    job.setJar("viewfs:///jobjars/" + localJar.getName());
  } else {
    job.setJarByClass(SleepJob.class);
  }
  job.setMaxMapAttempts(1); // speed up failures
  job.submit();
  String trackingUrl = job.getTrackingURL();
  String jobId = job.getJobID().toString();
  boolean succeeded = job.waitForCompletion(true);
  Assert.assertTrue(succeeded);
  Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
  Assert.assertTrue("Tracking URL was " + trackingUrl +
                    " but didn't Match Job ID " + jobId ,
        trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
  verifySleepJobCounters(job);
  verifyTaskProgress(job);

  // TODO later:  add explicit "isUber()" checks of some sort (extend
  // JobStatus?)--compare against MRJobConfig.JOB_UBERTASK_ENABLE value
}
Project: aliyun-oss-hadoop-fs    File: TestMRJobs.java
private void testSleepJobInternal(boolean useRemoteJar) throws Exception {
  LOG.info("\n\n\nStarting testSleepJob: useRemoteJar=" + useRemoteJar);

  if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
    LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
             + " not found. Not running test.");
    return;
  }

  Configuration sleepConf = new Configuration(mrCluster.getConfig());
  // set master address to local to test that local mode applied iff framework == local
  sleepConf.set(MRConfig.MASTER_ADDRESS, "local");  

  SleepJob sleepJob = new SleepJob();
  sleepJob.setConf(sleepConf);

  // job with 3 maps (10s) and numReduces reduces (5s), 1 "record" each:
  Job job = sleepJob.createJob(3, numSleepReducers, 10000, 1, 5000, 1);

  job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
  if (useRemoteJar) {
    final Path localJar = new Path(
        ClassUtil.findContainingJar(SleepJob.class));
    ConfigUtil.addLink(job.getConfiguration(), "/jobjars",
        localFs.makeQualified(localJar.getParent()).toUri());
    job.setJar("viewfs:///jobjars/" + localJar.getName());
  } else {
    job.setJarByClass(SleepJob.class);
  }
  job.setMaxMapAttempts(1); // speed up failures
  job.submit();
  String trackingUrl = job.getTrackingURL();
  String jobId = job.getJobID().toString();
  boolean succeeded = job.waitForCompletion(true);
  Assert.assertTrue(succeeded);
  Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
  Assert.assertTrue("Tracking URL was " + trackingUrl +
                    " but didn't Match Job ID " + jobId ,
        trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
  verifySleepJobCounters(job);
  verifyTaskProgress(job);

  // TODO later:  add explicit "isUber()" checks of some sort (extend
  // JobStatus?)--compare against MRJobConfig.JOB_UBERTASK_ENABLE value
}
Project: big-c    File: TestMRJobs.java
private void testSleepJobInternal(boolean useRemoteJar) throws Exception {
  LOG.info("\n\n\nStarting testSleepJob: useRemoteJar=" + useRemoteJar);

  if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
    LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
             + " not found. Not running test.");
    return;
  }

  Configuration sleepConf = new Configuration(mrCluster.getConfig());
  // set master address to local to test that local mode applied iff framework == local
  sleepConf.set(MRConfig.MASTER_ADDRESS, "local");  

  SleepJob sleepJob = new SleepJob();
  sleepJob.setConf(sleepConf);

  // job with 3 maps (10s) and numReduces reduces (5s), 1 "record" each:
  Job job = sleepJob.createJob(3, numSleepReducers, 10000, 1, 5000, 1);

  job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
  if (useRemoteJar) {
    final Path localJar = new Path(
        ClassUtil.findContainingJar(SleepJob.class));
    ConfigUtil.addLink(job.getConfiguration(), "/jobjars",
        localFs.makeQualified(localJar.getParent()).toUri());
    job.setJar("viewfs:///jobjars/" + localJar.getName());
  } else {
    job.setJarByClass(SleepJob.class);
  }
  job.setMaxMapAttempts(1); // speed up failures
  job.submit();
  String trackingUrl = job.getTrackingURL();
  String jobId = job.getJobID().toString();
  boolean succeeded = job.waitForCompletion(true);
  Assert.assertTrue(succeeded);
  Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
  Assert.assertTrue("Tracking URL was " + trackingUrl +
                    " but didn't Match Job ID " + jobId ,
        trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
  verifySleepJobCounters(job);
  verifyTaskProgress(job);

  // TODO later:  add explicit "isUber()" checks of some sort (extend
  // JobStatus?)--compare against MRJobConfig.JOB_UBERTASK_ENABLE value
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestMRJobs.java
private void testSleepJobInternal(boolean useRemoteJar) throws Exception {
  LOG.info("\n\n\nStarting testSleepJob: useRemoteJar=" + useRemoteJar);

  if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
    LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
             + " not found. Not running test.");
    return;
  }

  Configuration sleepConf = new Configuration(mrCluster.getConfig());
  // set master address to local to test that local mode applied iff framework == local
  sleepConf.set(MRConfig.MASTER_ADDRESS, "local");  

  SleepJob sleepJob = new SleepJob();
  sleepJob.setConf(sleepConf);

  // job with 3 maps (10s) and numReduces reduces (5s), 1 "record" each:
  Job job = sleepJob.createJob(3, numSleepReducers, 10000, 1, 5000, 1);

  job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
  if (useRemoteJar) {
    final Path localJar = new Path(
        ClassUtil.findContainingJar(SleepJob.class));
    ConfigUtil.addLink(job.getConfiguration(), "/jobjars",
        localFs.makeQualified(localJar.getParent()).toUri());
    job.setJar("viewfs:///jobjars/" + localJar.getName());
  } else {
    job.setJarByClass(SleepJob.class);
  }
  job.setMaxMapAttempts(1); // speed up failures
  job.submit();
  String trackingUrl = job.getTrackingURL();
  String jobId = job.getJobID().toString();
  boolean succeeded = job.waitForCompletion(true);
  Assert.assertTrue(succeeded);
  Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
  Assert.assertTrue("Tracking URL was " + trackingUrl +
                    " but didn't Match Job ID " + jobId ,
        trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
  verifySleepJobCounters(job);
  verifyTaskProgress(job);

  // TODO later:  add explicit "isUber()" checks of some sort (extend
  // JobStatus?)--compare against MRJobConfig.JOB_UBERTASK_ENABLE value
}
Project: FlexMap    File: TestMRJobs.java
private void testSleepJobInternal(boolean useRemoteJar) throws Exception {
  LOG.info("\n\n\nStarting testSleepJob: useRemoteJar=" + useRemoteJar);

  if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
    LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
             + " not found. Not running test.");
    return;
  }

  Configuration sleepConf = new Configuration(mrCluster.getConfig());
  // set master address to local to test that local mode applied iff framework == local
  sleepConf.set(MRConfig.MASTER_ADDRESS, "local");  

  SleepJob sleepJob = new SleepJob();
  sleepJob.setConf(sleepConf);

  // job with 3 maps (10s) and numReduces reduces (5s), 1 "record" each:
  Job job = sleepJob.createJob(3, numSleepReducers, 10000, 1, 5000, 1);

  job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
  if (useRemoteJar) {
    final Path localJar = new Path(
        ClassUtil.findContainingJar(SleepJob.class));
    ConfigUtil.addLink(job.getConfiguration(), "/jobjars",
        localFs.makeQualified(localJar.getParent()).toUri());
    job.setJar("viewfs:///jobjars/" + localJar.getName());
  } else {
    job.setJarByClass(SleepJob.class);
  }
  job.setMaxMapAttempts(1); // speed up failures
  job.submit();
  String trackingUrl = job.getTrackingURL();
  String jobId = job.getJobID().toString();
  boolean succeeded = job.waitForCompletion(true);
  Assert.assertTrue(succeeded);
  Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
  Assert.assertTrue("Tracking URL was " + trackingUrl +
                    " but didn't Match Job ID " + jobId ,
        trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
  verifySleepJobCounters(job);
  verifyTaskProgress(job);

  // TODO later:  add explicit "isUber()" checks of some sort (extend
  // JobStatus?)--compare against MRJobConfig.JOB_UBERTASK_ENABLE value
}
Project: hops    File: TestMRJobs.java
private void testSleepJobInternal(boolean useRemoteJar) throws Exception {
  LOG.info("\n\n\nStarting testSleepJob: useRemoteJar=" + useRemoteJar);

  if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
    LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
             + " not found. Not running test.");
    return;
  }

  Configuration sleepConf = new Configuration(mrCluster.getConfig());
  // set master address to local to test that local mode applied iff framework == local
  sleepConf.set(MRConfig.MASTER_ADDRESS, "local");  

  SleepJob sleepJob = new SleepJob();
  sleepJob.setConf(sleepConf);

  // job with 3 maps (10s) and numReduces reduces (5s), 1 "record" each:
  Job job = sleepJob.createJob(3, numSleepReducers, 10000, 1, 5000, 1);

  job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
  if (useRemoteJar) {
    final Path localJar = new Path(
        ClassUtil.findContainingJar(SleepJob.class));
    ConfigUtil.addLink(job.getConfiguration(), "/jobjars",
        localFs.makeQualified(localJar.getParent()).toUri());
    job.setJar("viewfs:///jobjars/" + localJar.getName());
  } else {
    job.setJarByClass(SleepJob.class);
  }
  job.setMaxMapAttempts(1); // speed up failures
  job.submit();
  String trackingUrl = job.getTrackingURL();
  String jobId = job.getJobID().toString();
  boolean succeeded = job.waitForCompletion(true);
  Assert.assertTrue(succeeded);
  Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
  Assert.assertTrue("Tracking URL was " + trackingUrl +
                    " but didn't Match Job ID " + jobId ,
        trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
  verifySleepJobCounters(job);
  verifyTaskProgress(job);

  // TODO later:  add explicit "isUber()" checks of some sort (extend
  // JobStatus?)--compare against MRJobConfig.JOB_UBERTASK_ENABLE value
}
Project: mrgeo    File: DependencyLoader.java
@SuppressWarnings("squid:S1166") // Exception caught and handled
private static Set<Dependency> loadDependenciesByReflection(Class<?> clazz) throws IOException
{
  String jar = ClassUtil.findContainingJar(clazz);

  if (jar != null)
  {
    return loadDependenciesFromJar(jar);
  }
  else
  {
    // the properties may have been added on the classpath, lets see if we can find it...
    Set<Dependency> deps = null;

    // set up a resource scanner
    Reflections reflections = new Reflections(new ConfigurationBuilder()
        .setUrls(ClasspathHelper.forPackage(ClassUtils.getPackageName(DependencyLoader.class)))
        .setScanners(new ResourcesScanner()));

    Set<String> resources = reflections.getResources(Pattern.compile(".*dependencies\\.properties"));
    for (String resource : resources)
    {
      log.debug("Loading dependency properties from: /" + resource);

      InputStream is = DependencyLoader.class.getResourceAsStream("/" + resource);

      try
      {
        Set<Dependency> d = readDependencies(is);
        is.close();

        if (deps == null)
        {
          deps = d;
        }
        else
        {
          deps.addAll(d);
        }
      }
      finally
      {
        if (is != null)
        {
          try
          {
            is.close();
          }
          catch (IOException ignored)
          {
          }
        }
      }
    }

    return deps;
  }
}
Project: hadoop    File: JobConf.java
/** 
 * Find a jar that contains a class of the same name, if any.
 * It will return a jar file, even if that is not the first thing
 * on the class path that has a class with the same name.
 * 
 * @param my_class the class to find.
 * @return a jar file that contains the class, or null.
 */
public static String findContainingJar(Class my_class) {
  return ClassUtil.findContainingJar(my_class);
}
Project: aliyun-oss-hadoop-fs    File: JobConf.java
/** 
 * Find a jar that contains a class of the same name, if any.
 * It will return a jar file, even if that is not the first thing
 * on the class path that has a class with the same name.
 * 
 * @param my_class the class to find.
 * @return a jar file that contains the class, or null.
 */
public static String findContainingJar(Class my_class) {
  return ClassUtil.findContainingJar(my_class);
}