Example source code for the Java class org.apache.hadoop.util.RunJar
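
The snippets below show real-world usage of org.apache.hadoop.util.RunJar, the class behind the hadoop jar command. Its two commonly used entry points are RunJar.main(String[]), which unpacks a jar and invokes its main class, and RunJar.unJar(File, File), which extracts a jar into a directory. As a minimal sketch of both calls (the jar path, destination directory, main class, and argument below are hypothetical placeholders):

import java.io.File;
import org.apache.hadoop.util.RunJar;

public class RunJarSketch {
    public static void main(String[] args) throws Throwable {
        // Extract an application jar into a working directory, as the
        // StreamJob examples below do for local runs.
        RunJar.unJar(new File("/tmp/app.jar"), new File("/tmp/app-unpacked"));

        // Launch a jar's main class with arguments. Note that RunJar.main
        // may terminate the JVM via System.exit(), which is why the Pig
        // snippets below wrap it in an exit-trapping SecurityManager.
        RunJar.main(new String[] { "/tmp/app.jar", "com.example.Main", "arg1" });
    }
}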

Project: spork-streaming    File: NativeMapReduceOper.java
public void runJob() throws JobCreationException {
    RunJarSecurityManager secMan = new RunJarSecurityManager();
    try {
        RunJar.main(getNativeMRParams());
        PigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
    } catch (SecurityException se) {
        if(secMan.getExitInvoked()) {
            if(secMan.getExitCode() != 0) {
                throw new JobCreationException("Native job returned with non-zero return code");
            }
            else {
                PigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
            }
        }
    } catch (Throwable t) {
        JobCreationException e = new JobCreationException(
                "Cannot run native mapreduce job "+ t.getMessage(), t);
        PigStatsUtil.addNativeJobStats(PigStats.get(), this, false, e);
        throw e;
    } finally {
        secMan.retire();
    }
}
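
The RunJarSecurityManager used above is how Pig survives the System.exit() that the jar launched by RunJar.main() typically calls when it finishes: the manager converts the exit into a catchable SecurityException while recording the exit code. A minimal sketch of that pattern follows (an illustration of the idea, not Pig's actual class; note that SecurityManager is deprecated in recent JDKs):

import java.security.Permission;

public class ExitTrappingSecurityManager extends SecurityManager {
    private final SecurityManager previous = System.getSecurityManager();
    private volatile boolean exitInvoked = false;
    private volatile int exitCode = 0;

    public ExitTrappingSecurityManager() {
        System.setSecurityManager(this);
    }

    @Override
    public void checkExit(int status) {
        exitInvoked = true;
        exitCode = status;
        // Turn the exit into an exception the caller can catch.
        throw new SecurityException("exit(" + status + ") intercepted");
    }

    @Override
    public void checkPermission(Permission perm) {
        // Permit everything else so the wrapped job runs unhindered.
    }

    public boolean getExitInvoked() { return exitInvoked; }

    public int getExitCode() { return exitCode; }

    public void retire() {
        System.setSecurityManager(previous);
    }
}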
Project: spork    File: NativeTezOper.java
public void runJob(String jobStatsKey) throws JobCreationException {
    RunJarSecurityManager secMan = new RunJarSecurityManager();
    try {
        RunJar.main(getNativeTezParams());
        ((TezPigScriptStats)PigStats.get()).addTezJobStatsForNative(jobStatsKey, this, true);
    } catch (SecurityException se) {
        if(secMan.getExitInvoked()) {
            if(secMan.getExitCode() != 0) {
                throw new JobCreationException("Native job returned with non-zero return code");
            }
            else {
                ((TezPigScriptStats)PigStats.get()).addTezJobStatsForNative(jobStatsKey, this, true);
            }
        }
    } catch (Throwable t) {
        JobCreationException e = new JobCreationException(
                "Cannot run native tez job "+ t.getMessage(), t);
        ((TezPigScriptStats)PigStats.get()).addTezJobStatsForNative(jobStatsKey, this, false);
        throw e;
    } finally {
        secMan.retire();
    }
}
Project: spork    File: NativeMapReduceOper.java
public void runJob() throws JobCreationException {
    RunJarSecurityManager secMan = new RunJarSecurityManager();
    try {
        RunJar.main(getNativeMRParams());
        MRPigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
    } catch (SecurityException se) {
        if(secMan.getExitInvoked()) {
            if(secMan.getExitCode() != 0) {
                throw new JobCreationException("Native job returned with non-zero return code");
            }
            else {
                MRPigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
            }
        }
    } catch (Throwable t) {
        JobCreationException e = new JobCreationException(
                "Cannot run native mapreduce job "+ t.getMessage(), t);
        MRPigStatsUtil.addNativeJobStats(PigStats.get(), this, false, e);
        throw e;
    } finally {
        secMan.retire();
    }
}
Project: PonIC    File: NativeMapReduceOper.java
public void runJob() throws JobCreationException {
    RunJarSecurityManager secMan = new RunJarSecurityManager();
    try {
        RunJar.main(getNativeMRParams());
        PigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
    } catch (SecurityException se) {
        if(secMan.getExitInvoked()) {
            if(secMan.getExitCode() != 0) {
                throw new JobCreationException("Native job returned with non-zero return code");
            }
            else {
                PigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
            }
        }
    } catch (Throwable t) {
        JobCreationException e = new JobCreationException(
                "Cannot run native mapreduce job "+ t.getMessage(), t);
        PigStatsUtil.addNativeJobStats(PigStats.get(), this, false, e);
        throw e;
    } finally {
        secMan.retire();
    }
}
Project: sedge    File: NativeMapReduceOper.java
public void runJob() throws JobCreationException {
    RunJarSecurityManager secMan = new RunJarSecurityManager();
    try {
        RunJar.main(getNativeMRParams());
        PigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
    } catch (SecurityException se) {
        if(secMan.getExitInvoked()) {
            if(secMan.getExitCode() != 0) {
                throw new JobCreationException("Native job returned with non-zero return code");
            }
            else {
                PigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
            }
        }
    } catch (Throwable t) {
        JobCreationException e = new JobCreationException(
                "Cannot run native mapreduce job "+ t.getMessage(), t);
        PigStatsUtil.addNativeJobStats(PigStats.get(), this, false, e);
        throw e;
    } finally {
        secMan.retire();
    }
}
Project: hadoop-2.6.0-cdh5.4.3    File: JobLocalizer.java
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param localJobConf
 * @throws IOException
 */
private void localizeJobJarFile(JobConf localJobConf) throws IOException {
  // copy Jar file to the local FS and unjar it.
  String jarFile = localJobConf.getJar();
  FileStatus status = null;
  long jarFileSize = -1;
  if (jarFile != null) {
    Path jarFilePath = new Path(jarFile);
    FileSystem userFs = jarFilePath.getFileSystem(localJobConf);
    try {
      status = userFs.getFileStatus(jarFilePath);
      jarFileSize = status.getLen();
    } catch (FileNotFoundException fe) {
      jarFileSize = -1;
    }
    // Request five times jarFileSize of local space to leave room for
    // unjarring the jar file in the jars directory
    Path localJarFile =
      lDirAlloc.getLocalPathForWrite(JARDST, 5 * jarFileSize, ttConf);

    //Download job.jar
    userFs.copyToLocalFile(jarFilePath, localJarFile);
    localJobConf.setJar(localJarFile.toString());
    // also unjar the parts of the job.jar that need to end up on the
    // classpath, or that the user explicitly requested
    RunJar.unJar(
      new File(localJarFile.toString()),
      new File(localJarFile.getParent().toString()),
      localJobConf.getJarUnpackPattern());
    FileUtil.chmod(localJarFile.getParent().toString(), "ugo+rx", true);
  }
}
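
The three-argument RunJar.unJar overload used above takes a java.util.regex.Pattern and extracts only the entries whose names match it. In MR1 the pattern comes from JobConf.getJarUnpackPattern(), whose default matches classes/ and lib/ entries. A small sketch of calling the overload directly (paths are hypothetical placeholders, and the pattern shown simply mirrors that default):

import java.io.File;
import java.io.IOException;
import java.util.regex.Pattern;
import org.apache.hadoop.util.RunJar;

public class UnJarSketch {
    public static void main(String[] args) throws IOException {
        // Extract only the classpath-relevant parts of a job jar.
        RunJar.unJar(new File("/tmp/job.jar"),
                     new File("/tmp/job-unpacked"),
                     Pattern.compile("(?:classes/|lib/).*"));
    }
}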
Project: hadoop-on-lustre    File: JobLocalizer.java
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param localJobConf
 * @throws IOException
 */
private void localizeJobJarFile(JobConf localJobConf) throws IOException {
  // copy Jar file to the local FS and unjar it.
  String jarFile = localJobConf.getJar();
  FileStatus status = null;
  long jarFileSize = -1;
  if (jarFile != null) {
    Path jarFilePath = new Path(jarFile);
    FileSystem userFs = jarFilePath.getFileSystem(localJobConf);
    try {
      status = userFs.getFileStatus(jarFilePath);
      jarFileSize = status.getLen();
    } catch (FileNotFoundException fe) {
      jarFileSize = -1;
    }
    // Request five times jarFileSize of local space to leave room for
    // unjarring the jar file in the jars directory
    Path localJarFile =
      lDirAlloc.getLocalPathForWrite(JARDST, 5 * jarFileSize, ttConf);

    //Download job.jar
    userFs.copyToLocalFile(jarFilePath, localJarFile);
    localJobConf.setJar(localJarFile.toString());
    // Also un-jar the job.jar so that classes inside sub-directories,
    // e.g. lib/ and classes/, are available on the classpath
    RunJar.unJar(new File(localJarFile.toString()),
        new File(localJarFile.getParent().toString()));
    FileUtil.chmod(localJarFile.getParent().toString(), "ugo+rx", true);
  }
}
Project: hanoi-hadoop-2.0.0-cdh    File: JobLocalizer.java
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param localJobConf
 * @throws IOException
 */
private void localizeJobJarFile(JobConf localJobConf) throws IOException {
  // copy Jar file to the local FS and unjar it.
  String jarFile = localJobConf.getJar();
  FileStatus status = null;
  long jarFileSize = -1;
  if (jarFile != null) {
    Path jarFilePath = new Path(jarFile);
    FileSystem userFs = jarFilePath.getFileSystem(localJobConf);
    try {
      status = userFs.getFileStatus(jarFilePath);
      jarFileSize = status.getLen();
    } catch (FileNotFoundException fe) {
      jarFileSize = -1;
    }
    // Request five times jarFileSize of local space to leave room for
    // unjarring the jar file in the jars directory
    Path localJarFile =
      lDirAlloc.getLocalPathForWrite(JARDST, 5 * jarFileSize, ttConf);

    //Download job.jar
    userFs.copyToLocalFile(jarFilePath, localJarFile);
    localJobConf.setJar(localJarFile.toString());
    // also unjar the parts of the job.jar that need to end up on the
    // classpath, or that the user explicitly requested
    RunJar.unJar(
      new File(localJarFile.toString()),
      new File(localJarFile.getParent().toString()),
      localJobConf.getJarUnpackPattern());
    try {
      FileUtil.chmod(localJarFile.getParent().toString(), "ugo+rx", true);
    } catch (InterruptedException ie) {
      // This exception is never actually thrown, but the signature says
      // it is, and we can't make the incompatible change within CDH
      throw new IOException("Interrupted while chmodding", ie);
    }
  }
}
Project: mapreduce-fork    File: TaskTracker.java
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 * 
 * @param jobId
 * @param localFs
 * @param localJobConf
 * @throws IOException
 */
private void localizeJobJarFile(String user, JobID jobId, FileSystem localFs,
    JobConf localJobConf)
    throws IOException, InterruptedException {
  // copy Jar file to the local FS and unjar it.
  String jarFile = localJobConf.getJar();
  FileStatus status = null;
  long jarFileSize = -1;
  if (jarFile != null) {
    Path jarFilePath = new Path(jarFile);
    FileSystem fs = getFS(jarFilePath, jobId, localJobConf);
    try {
      status = fs.getFileStatus(jarFilePath);
      jarFileSize = status.getLen();
    } catch (FileNotFoundException fe) {
      jarFileSize = -1;
    }
    // Request five times jarFileSize of local space to leave room for
    // unjarring the jar file in the jars directory
    Path localJarFile =
        lDirAlloc.getLocalPathForWrite(
            getJobJarFile(user, jobId.toString()), 5 * jarFileSize, fConf);

    // Download job.jar
    fs.copyToLocalFile(jarFilePath, localJarFile);

    localJobConf.setJar(localJarFile.toString());

    // Un-jar the parts of the job.jar that need to be added to the classpath
    RunJar.unJar(
      new File(localJarFile.toString()),
      new File(localJarFile.getParent().toString()),
      localJobConf.getJarUnpackPattern());
  }
}
Project: mammoth    File: JobLocalizer.java
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param localJobConf
 * @throws IOException
 */
private void localizeJobJarFile(JobConf localJobConf) throws IOException {
  // copy Jar file to the local FS and unjar it.
  String jarFile = localJobConf.getJar();
  FileStatus status = null;
  long jarFileSize = -1;
  if (jarFile != null) {
    Path jarFilePath = new Path(jarFile);
    FileSystem userFs = jarFilePath.getFileSystem(localJobConf);
    try {
      status = userFs.getFileStatus(jarFilePath);
      jarFileSize = status.getLen();
    } catch (FileNotFoundException fe) {
      jarFileSize = -1;
    }
    // Request five times jarFileSize of local space to leave room for
    // unjarring the jar file in the jars directory
    Path localJarFile =
      lDirAlloc.getLocalPathForWrite(JARDST, 5 * jarFileSize, ttConf);

    //Download job.jar
    userFs.copyToLocalFile(jarFilePath, localJarFile);
    localJobConf.setJar(localJarFile.toString());
    // Also un-jar the job.jar so that classes inside sub-directories,
    // e.g. lib/ and classes/, are available on the classpath
    RunJar.unJar(new File(localJarFile.toString()),
        new File(localJarFile.getParent().toString()));
    FileUtil.chmod(localJarFile.getParent().toString(), "ugo+rx", true);
  }
}
Project: hortonworks-extension    File: JobLocalizer.java
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param localJobConf
 * @throws IOException
 */
private void localizeJobJarFile(JobConf localJobConf) throws IOException {
  // copy Jar file to the local FS and unjar it.
  String jarFile = localJobConf.getJar();
  FileStatus status = null;
  long jarFileSize = -1;
  if (jarFile != null) {
    Path jarFilePath = new Path(jarFile);
    FileSystem userFs = jarFilePath.getFileSystem(localJobConf);
    try {
      status = userFs.getFileStatus(jarFilePath);
      jarFileSize = status.getLen();
    } catch (FileNotFoundException fe) {
      jarFileSize = -1;
    }
    // Request five times jarFileSize of local space to leave room for
    // unjarring the jar file in the jars directory
    Path localJarFile =
      lDirAlloc.getLocalPathForWrite(JARDST, 5 * jarFileSize, ttConf);

    //Download job.jar
    userFs.copyToLocalFile(jarFilePath, localJarFile);
    localJobConf.setJar(localJarFile.toString());
    // Also un-jar the job.jar so that classes inside sub-directories,
    // e.g. lib/ and classes/, are available on the classpath
    RunJar.unJar(new File(localJarFile.toString()),
        new File(localJarFile.getParent().toString()));
    FileUtil.chmod(localJarFile.getParent().toString(), "ugo+rx", true);
  }
}
Project: hortonworks-extension    File: JobLocalizer.java
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param localJobConf
 * @throws IOException
 */
private void localizeJobJarFile(JobConf localJobConf) throws IOException {
  // copy Jar file to the local FS and unjar it.
  String jarFile = localJobConf.getJar();
  FileStatus status = null;
  long jarFileSize = -1;
  if (jarFile != null) {
    Path jarFilePath = new Path(jarFile);
    FileSystem userFs = jarFilePath.getFileSystem(localJobConf);
    try {
      status = userFs.getFileStatus(jarFilePath);
      jarFileSize = status.getLen();
    } catch (FileNotFoundException fe) {
      jarFileSize = -1;
    }
    // Request five times jarFileSize of local space to leave room for
    // unjarring the jar file in the jars directory
    Path localJarFile =
      lDirAlloc.getLocalPathForWrite(JARDST, 5 * jarFileSize, ttConf);

    //Download job.jar
    userFs.copyToLocalFile(jarFilePath, localJarFile);
    localJobConf.setJar(localJarFile.toString());
    // Also un-jar the job.jar so that classes inside sub-directories,
    // e.g. lib/ and classes/, are available on the classpath
    RunJar.unJar(new File(localJarFile.toString()),
        new File(localJarFile.getParent().toString()));
    FileUtil.chmod(localJarFile.getParent().toString(), "ugo+rx", true);
  }
}
Project: t4f-data    File: MapReduceRunJar.java
public static void main(final String... args) throws Exception {

        File jarFolder = new File(System.getenv("HADOOP_HOME") + "/share/hadoop/mapreduce/");
        final File[] jar = jarFolder.listFiles(new FileFilter() {
            @Override
            public boolean accept(File pathname) {
                return pathname.getName().startsWith("hadoop-mapreduce-examples-");
            }
        });

        if (jar.length != 1) {
            throw new IllegalStateException("Can't find any hadoop-mapreduce-examples jar in " + jarFolder
                    + " - Check your $HADOOP_HOME");
        }

        new AosProcessLauncher() {
            @Override
            public void process() throws Exception {
                try {
                    String[] rjArgs = new String[args.length + 1];
                    rjArgs[0] = jar[0].getAbsolutePath();
                    System.arraycopy(args, 0, rjArgs, 1, args.length);
                    RunJar.main(rjArgs);
                } catch (Throwable t) {
                    throw new Exception(t);
                }
            }
        }.launch();

        // while (true) {
        // LOGGER.info("Sleeping...");
        // TimeUnit.MINUTES.sleep(1);
        // }

    }
Project: hadoop    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }
Project: aliyun-oss-hadoop-fs    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }
Project: big-c    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }
Project: hadoop-2.6.0-cdh5.4.3    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }
Project: hadoop-plus    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not Successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }
Project: ignite    File: HadoopV2JobResourceManager.java
/**
 * Process list of resources.
 *
 * @param jobLocDir Job working directory.
 * @param files Array of {@link URI} or {@link org.apache.hadoop.fs.Path} resources to process.
 * @param download {@code true} if the resources must be downloaded; otherwise only the class path is processed.
 * @param extract {@code true} if archives must be extracted.
 * @param clsPathUrls Collection to which each resource is added as a classpath URL.
 * @param rsrcNameProp Configuration property under which to record the resource names.
 * @throws IOException If failed.
 */
private void processFiles(File jobLocDir, @Nullable Object[] files, boolean download, boolean extract,
    @Nullable Collection<URL> clsPathUrls, @Nullable String rsrcNameProp) throws IOException {
    if (F.isEmptyOrNulls(files))
        return;

    Collection<String> res = new ArrayList<>();

    for (Object pathObj : files) {
        Path srcPath;

        if (pathObj instanceof URI) {
            URI uri = (URI)pathObj;

            srcPath = new Path(uri);
        }
        else
            srcPath = (Path)pathObj;

        String locName = srcPath.getName();

        File dstPath = new File(jobLocDir.getAbsolutePath(), locName);

        res.add(locName);

        rsrcSet.add(dstPath);

        if (clsPathUrls != null)
            clsPathUrls.add(dstPath.toURI().toURL());

        if (!download)
            continue;

        JobConf cfg = ctx.getJobConf();

        FileSystem dstFs = FileSystem.getLocal(cfg);

        FileSystem srcFs = job.fileSystem(srcPath.toUri(), cfg);

        if (extract) {
            File archivesPath = new File(jobLocDir.getAbsolutePath(), ".cached-archives");

            if (!archivesPath.exists() && !archivesPath.mkdir())
                throw new IOException("Failed to create directory " +
                    "[path=" + archivesPath + ", jobId=" + jobId + ']');

            File archiveFile = new File(archivesPath, locName);

            FileUtil.copy(srcFs, srcPath, dstFs, new Path(archiveFile.toString()), false, cfg);

            String archiveNameLC = archiveFile.getName().toLowerCase();

            if (archiveNameLC.endsWith(".jar"))
                RunJar.unJar(archiveFile, dstPath);
            else if (archiveNameLC.endsWith(".zip"))
                FileUtil.unZip(archiveFile, dstPath);
            else if (archiveNameLC.endsWith(".tar.gz") ||
                archiveNameLC.endsWith(".tgz") ||
                archiveNameLC.endsWith(".tar"))
                FileUtil.unTar(archiveFile, dstPath);
            else
                throw new IOException("Cannot unpack archive [path=" + srcPath + ", jobId=" + jobId + ']');
        }
        else
            FileUtil.copy(srcFs, srcPath, dstFs, new Path(dstPath.toString()), false, cfg);
    }

    if (!res.isEmpty() && rsrcNameProp != null)
        ctx.getJobConf().setStrings(rsrcNameProp, res.toArray(new String[res.size()]));
}
Project: hops    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }
Project: hadoop-TCP    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not Successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }
Project: hardfs    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not Successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }
Project: hadoop-on-lustre2    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not Successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }
Project: memnon    File: JobSpawner.java
public static void spawnRemote(RubyRun run)
        throws Throwable {
    LOG.debug(run.toString());
    String[] args = run.toArgs(false);
    RunJar.main(args);
}
Project: mapreduce-fork    File: TrackerDistributedCacheManager.java
Path localizeCache(Configuration conf,
                                  URI cache, long confFileStamp,
                                  CacheStatus cacheStatus,
                                  boolean isArchive, boolean isPublic)
throws IOException {
  FileSystem fs = FileSystem.get(cache, conf);
  FileSystem localFs = FileSystem.getLocal(conf);
  Path parchive = null;
  if (isArchive) {
    parchive = new Path(cacheStatus.localizedLoadPath,
      new Path(cacheStatus.localizedLoadPath.getName()));
  } else {
    parchive = cacheStatus.localizedLoadPath;
  }

  if (!localFs.mkdirs(parchive.getParent())) {
    throw new IOException("Mkdirs failed to create directory " +
        cacheStatus.localizedLoadPath.toString());
  }

  String cacheId = cache.getPath();
  fs.copyToLocalFile(new Path(cacheId), parchive);
  if (isArchive) {
    String tmpArchive = parchive.toString().toLowerCase();
    File srcFile = new File(parchive.toString());
    File destDir = new File(parchive.getParent().toString());
    LOG.info(String.format("Extracting %s to %s",
        srcFile.toString(), destDir.toString()));
    if (tmpArchive.endsWith(".jar")) {
      RunJar.unJar(srcFile, destDir);
    } else if (tmpArchive.endsWith(".zip")) {
      FileUtil.unZip(srcFile, destDir);
    } else if (isTarFile(tmpArchive)) {
      FileUtil.unTar(srcFile, destDir);
    } else {
      LOG.warn(String.format(
          "Cache file %s specified as archive, but it has no valid archive extension.",
          srcFile.toString()));
      // not an archive we can extract; the file has already been
      // copied into the directory as-is
    }
  }

  long cacheSize = 
    FileUtil.getDU(new File(parchive.getParent().toString()));
  cacheStatus.size = cacheSize;
  // Increase the size and sub directory count of the cache
  // from baseDirSize and baseDirNumberSubDir.
  baseDirManager.addCacheUpdate(cacheStatus);

  // set proper permissions for the localized directory
  setPermissions(conf, cacheStatus, isPublic);

  // update cacheStatus to reflect the newly cached file
  cacheStatus.mtime = getTimestamp(conf, cache);

  LOG.info(String.format("Cached %s as %s",
           cache.toString(), cacheStatus.localizedLoadPath));
  return cacheStatus.localizedLoadPath;
}
Project: mapreduce-fork    File: StreamJob.java
public int submitAndMonitorJob() throws IOException {

    if (jar_ != null && isLocalHadoop()) {
      // getAbs became required when shell and subvm have different working dirs...
      File wd = new File(".").getAbsoluteFile();
      RunJar.unJar(new File(jar_), wd);
    }

    // if jobConf_ changes must recreate a JobClient
    jc_ = new JobClient(jobConf_);
    running_ = null;
    try {
      running_ = jc_.submitJob(jobConf_);
      jobId_ = running_.getID();
      jobInfo();
      if (background_) {
        LOG.info("Job is running in background.");
      } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
        LOG.error("Job not Successful!");
        return 1;
      }
      LOG.info("Output directory: " + output_);
    } catch (FileNotFoundException fe) {
      LOG.error("Error launching job, bad input path: " + fe.getMessage());
      return 2;
    } catch (InvalidJobConfException je) {
      LOG.error("Error launching job, invalid job conf: " + je.getMessage());
      return 3;
    } catch (FileAlreadyExistsException fae) {
      LOG.error("Error launching job, output path already exists: "
                + fae.getMessage());
      return 4;
    } catch (IOException ioe) {
      LOG.error("Error launching job: " + ioe.getMessage());
      return 5;
    } catch (InterruptedException ie) {
      LOG.error("Error monitoring job: " + ie.getMessage());
      return 6;
    } finally {
      jc_.close();
    }
    return 0;
  }