Java 类org.apache.hadoop.mapred.JobACLsManager 实例源码

项目:hadoop    文件:HistoryFileManager.java   
@Override
protected void serviceInit(Configuration conf) throws Exception {
  this.conf = conf;

  // Serial-number directory names are zero-padded to
  // (SERIAL_NUMBER_DIRECTORY_DIGITS + 3) digits, e.g. "%09d".
  int serialNumberLowDigits = 3;
  serialNumberFormat = ("%0"
      + (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits)
      + "d");

  // Create/verify the history directories before accepting work.
  // 10 * 1000 is presumably a retry interval in ms — confirm against
  // createHistoryDirs; maxFSWaitTime bounds the total wait.
  long maxFSWaitTime = conf.getLong(
      JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
  createHistoryDirs(new SystemClock(), 10 * 1000, maxFSWaitTime);

  this.aclsMgr = new JobACLsManager(conf);

  // Max retention age for history files (key suffix suggests milliseconds).
  maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);

  jobListCache = createJobListCache();

  serialNumberIndex = new SerialNumberIndex(conf.getInt(
      JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE,
      JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));

  // Pool that moves intermediate history files to the "done" location;
  // construction is delegated so subclasses/tests can override it.
  int numMoveThreads = conf.getInt(
      JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
      JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
  moveToDoneExecutor = createMoveToDoneThreadPool(numMoveThreads);
  super.serviceInit(conf);
}
项目:hadoop    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:hadoop    文件:TestHsWebServicesAcls.java   
/**
 * Test double wrapping a mocked Job: uses a real JobACLsManager and a
 * VIEW_JOB ACL that admits only FRIENDLY_USER.
 */
public MockJobForAcls(Job mockJob, Configuration conf) {
  this.mockJob = mockJob;
  this.conf = conf;
  // Only VIEW_JOB is populated; FRIENDLY_USER is the sole permitted viewer.
  AccessControlList viewAcl = new AccessControlList(FRIENDLY_USER);
  this.jobAcls = new HashMap<JobACL, AccessControlList>();
  this.jobAcls.put(JobACL.VIEW_JOB, viewAcl);
  this.aclsMgr = new JobACLsManager(conf); 
}
项目:hadoop    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:aliyun-oss-hadoop-fs    文件:HistoryFileManager.java   
@Override
protected void serviceInit(Configuration conf) throws Exception {
  this.conf = conf;

  // Serial-number directory names are zero-padded to
  // (SERIAL_NUMBER_DIRECTORY_DIGITS + 3) digits, e.g. "%09d".
  int serialNumberLowDigits = 3;
  serialNumberFormat = ("%0"
      + (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits)
      + "d");

  // Create/verify the history directories before accepting work.
  // 10 * 1000 is presumably a retry interval in ms — confirm against
  // createHistoryDirs; maxFSWaitTime bounds the total wait.
  long maxFSWaitTime = conf.getLong(
      JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
  createHistoryDirs(new SystemClock(), 10 * 1000, maxFSWaitTime);

  this.aclsMgr = new JobACLsManager(conf);

  // Max retention age for history files (key suffix suggests milliseconds).
  maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);

  jobListCache = createJobListCache();

  serialNumberIndex = new SerialNumberIndex(conf.getInt(
      JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE,
      JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));

  // Fixed-size, named-thread pool that moves intermediate history files to
  // the "done" location; idle threads expire after 1 hour.
  int numMoveThreads = conf.getInt(
      JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
      JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
  ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat(
      "MoveIntermediateToDone Thread #%d").build();
  moveToDoneExecutor = new ThreadPoolExecutor(numMoveThreads, numMoveThreads,
      1, TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>(), tf);

  super.serviceInit(conf);
}
项目:aliyun-oss-hadoop-fs    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:aliyun-oss-hadoop-fs    文件:TestHsWebServicesAcls.java   
/**
 * Test double wrapping a mocked Job: uses a real JobACLsManager and a
 * VIEW_JOB ACL that admits only FRIENDLY_USER.
 */
public MockJobForAcls(Job mockJob, Configuration conf) {
  this.mockJob = mockJob;
  this.conf = conf;
  // Only VIEW_JOB is populated; FRIENDLY_USER is the sole permitted viewer.
  AccessControlList viewAcl = new AccessControlList(FRIENDLY_USER);
  this.jobAcls = new HashMap<JobACL, AccessControlList>();
  this.jobAcls.put(JobACL.VIEW_JOB, viewAcl);
  this.aclsMgr = new JobACLsManager(conf); 
}
项目:aliyun-oss-hadoop-fs    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:big-c    文件:HistoryFileManager.java   
@Override
protected void serviceInit(Configuration conf) throws Exception {
  this.conf = conf;

  // Serial-number directory names are zero-padded to
  // (SERIAL_NUMBER_DIRECTORY_DIGITS + 3) digits, e.g. "%09d".
  int serialNumberLowDigits = 3;
  serialNumberFormat = ("%0"
      + (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits)
      + "d");

  // Create/verify the history directories before accepting work.
  // 10 * 1000 is presumably a retry interval in ms — confirm against
  // createHistoryDirs; maxFSWaitTime bounds the total wait.
  long maxFSWaitTime = conf.getLong(
      JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
  createHistoryDirs(new SystemClock(), 10 * 1000, maxFSWaitTime);

  this.aclsMgr = new JobACLsManager(conf);

  // Max retention age for history files (key suffix suggests milliseconds).
  maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);

  jobListCache = createJobListCache();

  serialNumberIndex = new SerialNumberIndex(conf.getInt(
      JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE,
      JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));

  // Fixed-size, named-thread pool that moves intermediate history files to
  // the "done" location; idle threads expire after 1 hour.
  int numMoveThreads = conf.getInt(
      JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
      JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
  ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat(
      "MoveIntermediateToDone Thread #%d").build();
  moveToDoneExecutor = new ThreadPoolExecutor(numMoveThreads, numMoveThreads,
      1, TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>(), tf);

  super.serviceInit(conf);
}
项目:big-c    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:big-c    文件:TestHsWebServicesAcls.java   
/**
 * Test double wrapping a mocked Job: uses a real JobACLsManager and a
 * VIEW_JOB ACL that admits only FRIENDLY_USER.
 */
public MockJobForAcls(Job mockJob, Configuration conf) {
  this.mockJob = mockJob;
  this.conf = conf;
  // Only VIEW_JOB is populated; FRIENDLY_USER is the sole permitted viewer.
  AccessControlList viewAcl = new AccessControlList(FRIENDLY_USER);
  this.jobAcls = new HashMap<JobACL, AccessControlList>();
  this.jobAcls.put(JobACL.VIEW_JOB, viewAcl);
  this.aclsMgr = new JobACLsManager(conf); 
}
项目:big-c    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:hadoop-2.6.0-cdh5.4.3    文件:HistoryFileManager.java   
@Override
protected void serviceInit(Configuration conf) throws Exception {
  this.conf = conf;

  // Serial-number directory names are zero-padded to
  // (SERIAL_NUMBER_DIRECTORY_DIGITS + 3) digits, e.g. "%09d".
  int serialNumberLowDigits = 3;
  serialNumberFormat = ("%0"
      + (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits)
      + "d");

  // Create/verify the history directories before accepting work.
  // 10 * 1000 is presumably a retry interval in ms — confirm against
  // createHistoryDirs; maxFSWaitTime bounds the total wait.
  long maxFSWaitTime = conf.getLong(
      JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
  createHistoryDirs(new SystemClock(), 10 * 1000, maxFSWaitTime);

  this.aclsMgr = new JobACLsManager(conf);

  // Max retention age for history files (key suffix suggests milliseconds).
  maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);

  jobListCache = createJobListCache();

  serialNumberIndex = new SerialNumberIndex(conf.getInt(
      JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE,
      JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));

  // Fixed-size, named-thread pool that moves intermediate history files to
  // the "done" location; idle threads expire after 1 hour.
  int numMoveThreads = conf.getInt(
      JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
      JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
  ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat(
      "MoveIntermediateToDone Thread #%d").build();
  moveToDoneExecutor = new ThreadPoolExecutor(numMoveThreads, numMoveThreads,
      1, TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>(), tf);

  super.serviceInit(conf);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TestHsWebServicesAcls.java   
/**
 * Test double wrapping a mocked Job: uses a real JobACLsManager and a
 * VIEW_JOB ACL that admits only FRIENDLY_USER.
 */
public MockJobForAcls(Job mockJob, Configuration conf) {
  this.mockJob = mockJob;
  this.conf = conf;
  // Only VIEW_JOB is populated; FRIENDLY_USER is the sole permitted viewer.
  AccessControlList viewAcl = new AccessControlList(FRIENDLY_USER);
  this.jobAcls = new HashMap<JobACL, AccessControlList>();
  this.jobAcls.put(JobACL.VIEW_JOB, viewAcl);
  this.aclsMgr = new JobACLsManager(conf); 
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:hadoop-plus    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:hadoop-plus    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:FlexMap    文件:HistoryFileManager.java   
@Override
protected void serviceInit(Configuration conf) throws Exception {
  this.conf = conf;

  // Serial-number directory names are zero-padded to
  // (SERIAL_NUMBER_DIRECTORY_DIGITS + 3) digits, e.g. "%09d".
  int serialNumberLowDigits = 3;
  serialNumberFormat = ("%0"
      + (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits)
      + "d");

  // Create/verify the history directories before accepting work.
  // 10 * 1000 is presumably a retry interval in ms — confirm against
  // createHistoryDirs; maxFSWaitTime bounds the total wait.
  long maxFSWaitTime = conf.getLong(
      JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
  createHistoryDirs(new SystemClock(), 10 * 1000, maxFSWaitTime);

  this.aclsMgr = new JobACLsManager(conf);

  // Max retention age for history files (key suffix suggests milliseconds).
  maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);

  jobListCache = createJobListCache();

  serialNumberIndex = new SerialNumberIndex(conf.getInt(
      JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE,
      JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));

  // Fixed-size, named-thread pool that moves intermediate history files to
  // the "done" location; idle threads expire after 1 hour.
  int numMoveThreads = conf.getInt(
      JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
      JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
  ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat(
      "MoveIntermediateToDone Thread #%d").build();
  moveToDoneExecutor = new ThreadPoolExecutor(numMoveThreads, numMoveThreads,
      1, TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>(), tf);

  super.serviceInit(conf);
}
项目:FlexMap    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:FlexMap    文件:TestHsWebServicesAcls.java   
/**
 * Test double wrapping a mocked Job: uses a real JobACLsManager and a
 * VIEW_JOB ACL that admits only FRIENDLY_USER.
 */
public MockJobForAcls(Job mockJob, Configuration conf) {
  this.mockJob = mockJob;
  this.conf = conf;
  // Only VIEW_JOB is populated; FRIENDLY_USER is the sole permitted viewer.
  AccessControlList viewAcl = new AccessControlList(FRIENDLY_USER);
  this.jobAcls = new HashMap<JobACL, AccessControlList>();
  this.jobAcls.put(JobACL.VIEW_JOB, viewAcl);
  this.aclsMgr = new JobACLsManager(conf); 
}
项目:FlexMap    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:hops    文件:HistoryFileManager.java   
@Override
protected void serviceInit(Configuration conf) throws Exception {
  this.conf = conf;

  // Serial-number directory names are zero-padded to
  // (SERIAL_NUMBER_DIRECTORY_DIGITS + 3) digits, e.g. "%09d".
  int serialNumberLowDigits = 3;
  serialNumberFormat = ("%0"
      + (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits)
      + "d");

  // Create/verify the history directories before accepting work.
  // 10 * 1000 is presumably a retry interval in ms — confirm against
  // createHistoryDirs; maxFSWaitTime bounds the total wait.
  long maxFSWaitTime = conf.getLong(
      JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
  createHistoryDirs(new SystemClock(), 10 * 1000, maxFSWaitTime);

  this.aclsMgr = new JobACLsManager(conf);

  // Max retention age for history files (key suffix suggests milliseconds).
  maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);

  jobListCache = createJobListCache();

  serialNumberIndex = new SerialNumberIndex(conf.getInt(
      JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE,
      JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));

  // Pool that moves intermediate history files to the "done" location;
  // construction is delegated so subclasses/tests can override it.
  int numMoveThreads = conf.getInt(
      JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
      JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
  moveToDoneExecutor = createMoveToDoneThreadPool(numMoveThreads);
  super.serviceInit(conf);
}
项目:hops    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:hops    文件:TestHsWebServicesAcls.java   
/**
 * Test double wrapping a mocked Job: uses a real JobACLsManager and a
 * VIEW_JOB ACL that admits only FRIENDLY_USER.
 */
public MockJobForAcls(Job mockJob, Configuration conf) {
  this.mockJob = mockJob;
  this.conf = conf;
  // Only VIEW_JOB is populated; FRIENDLY_USER is the sole permitted viewer.
  AccessControlList viewAcl = new AccessControlList(FRIENDLY_USER);
  this.jobAcls = new HashMap<JobACL, AccessControlList>();
  this.jobAcls.put(JobACL.VIEW_JOB, viewAcl);
  this.aclsMgr = new JobACLsManager(conf); 
}
项目:hops    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:hadoop-TCP    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:hadoop-TCP    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:hardfs    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:hardfs    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:hadoop-on-lustre2    文件:HistoryFileManager.java   
@Override
protected void serviceInit(Configuration conf) throws Exception {
  this.conf = conf;

  // Serial-number directory names are zero-padded to
  // (SERIAL_NUMBER_DIRECTORY_DIGITS + 3) digits, e.g. "%09d".
  int serialNumberLowDigits = 3;
  serialNumberFormat = ("%0"
      + (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits)
      + "d");

  // Create/verify the history directories before accepting work.
  // 10 * 1000 is presumably a retry interval in ms — confirm against
  // createHistoryDirs; maxFSWaitTime bounds the total wait.
  long maxFSWaitTime = conf.getLong(
      JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
  createHistoryDirs(new SystemClock(), 10 * 1000, maxFSWaitTime);

  this.aclsMgr = new JobACLsManager(conf);

  // Max retention age for history files (key suffix suggests milliseconds).
  maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS,
      JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);

  jobListCache = createJobListCache();

  serialNumberIndex = new SerialNumberIndex(conf.getInt(
      JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE,
      JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));

  // Fixed-size, named-thread pool that moves intermediate history files to
  // the "done" location; idle threads expire after 1 hour.
  int numMoveThreads = conf.getInt(
      JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
      JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
  ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat(
      "MoveIntermediateToDone Thread #%d").build();
  moveToDoneExecutor = new ThreadPoolExecutor(numMoveThreads, numMoveThreads,
      1, TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>(), tf);

  super.serviceInit(conf);
}
项目:hadoop-on-lustre2    文件:CompletedJob.java   
/**
 * Builds a read-only view of a finished job by eagerly parsing its history
 * file (and per-task data when {@code loadTasks} is true).
 *
 * @param conf        configuration used while loading
 * @param jobId       id of the completed job
 * @param historyFile path of the .jhist file to parse
 * @param loadTasks   whether to also load per-task information
 * @param userName    user the job belongs to
 * @param info        history-file metadata handle for this job
 * @param aclsMgr     ACL manager used for subsequent access checks
 * @throws IOException if the history data cannot be loaded
 */
public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
    boolean loadTasks, String userName, HistoryFileInfo info,
    JobACLsManager aclsMgr) 
        throws IOException {
  LOG.info("Loading job: " + jobId + " from file: " + historyFile);
  this.conf = conf;
  this.jobId = jobId;
  this.user = userName;
  this.info = info;
  this.aclsMgr = aclsMgr;
  loadFullHistoryData(loadTasks, historyFile);
}
项目:hadoop-on-lustre2    文件:TestHsWebServicesAcls.java   
/**
 * Test double wrapping a mocked Job: uses a real JobACLsManager and a
 * VIEW_JOB ACL that admits only FRIENDLY_USER.
 */
public MockJobForAcls(Job mockJob, Configuration conf) {
  this.mockJob = mockJob;
  this.conf = conf;
  // Only VIEW_JOB is populated; FRIENDLY_USER is the sole permitted viewer.
  AccessControlList viewAcl = new AccessControlList(FRIENDLY_USER);
  this.jobAcls = new HashMap<JobACL, AccessControlList>();
  this.jobAcls.put(JobACL.VIEW_JOB, viewAcl);
  this.aclsMgr = new JobACLsManager(conf); 
}
项目:hadoop-on-lustre2    文件:TestJobInfo.java   
/**
 * Verifies JobInfo#getAvgMergeTime over a real .jhist fixture: two tasks
 * with merge times 45 and 55 must average to 50.
 */
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  // Fixtures for job_1329348432655_0001 bundled on the test classpath.
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  // Renamed from the misspelled "fulleHistoryPath".
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
项目:hadoop    文件:JobImpl.java   
/**
 * Constructs the AM-side job representation: copies configuration, wires
 * collaborators, builds ACLs and locks, and initializes the job state
 * machine. Field-assignment order matters here; later steps (ACL
 * construction, state-machine creation) read earlier fields.
 */
public JobImpl(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler,
    TaskAttemptListener taskAttemptListener,
    JobTokenSecretManager jobTokenSecretManager,
    Credentials jobCredentials, Clock clock,
    Map<TaskId, TaskInfo> completedTasksFromPreviousRun, MRAppMetrics metrics,
    OutputCommitter committer, boolean newApiCommitter, String userName,
    long appSubmitTime, List<AMInfo> amInfos, AppContext appContext,
    JobStateInternal forcedState, String forcedDiagnostic) {
  this.applicationAttemptId = applicationAttemptId;
  this.jobId = jobId;
  this.jobName = conf.get(JobContext.JOB_NAME, "<missing job name>");
  // Defensive copy: JobImpl keeps its own JobConf, isolated from the caller's.
  this.conf = new JobConf(conf);
  this.metrics = metrics;
  this.clock = clock;
  this.completedTasksFromPreviousRun = completedTasksFromPreviousRun;
  this.amInfos = amInfos;
  this.appContext = appContext;
  this.userName = userName;
  this.queueName = conf.get(MRJobConfig.QUEUE_NAME, "default");
  this.appSubmitTime = appSubmitTime;
  this.oldJobId = TypeConverter.fromYarn(jobId);
  this.committer = committer;
  this.newApiCommitter = newApiCommitter;

  this.taskAttemptListener = taskAttemptListener;
  this.eventHandler = eventHandler;
  // Single lock pair shared by all readers/writers of job state.
  ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
  this.readLock = readWriteLock.readLock();
  this.writeLock = readWriteLock.writeLock();

  this.jobCredentials = jobCredentials;
  this.jobTokenSecretManager = jobTokenSecretManager;

  this.aclsManager = new JobACLsManager(conf);
  // Note: distinct from userName above — this is the JVM's OS-level user.
  this.username = System.getProperty("user.name");
  this.jobACLs = aclsManager.constructJobACLs(conf);

  // Single-threaded daemon scheduler used for the job-fail wait timeout.
  ThreadFactory threadFactory = new ThreadFactoryBuilder()
    .setNameFormat("Job Fail Wait Timeout Monitor #%d")
    .setDaemon(true)
    .build();
  this.executor = new ScheduledThreadPoolExecutor(1, threadFactory);

  // This "this leak" is okay because the retained pointer is in an
  //  instance variable.
  stateMachine = stateMachineFactory.make(this);
  this.forcedState  = forcedState;
  if(forcedDiagnostic != null) {
    this.diagnostics.add(forcedDiagnostic);
  }

  // Thresholds governing when reduce-reported fetch failures fail a map.
  this.maxAllowedFetchFailuresFraction = conf.getFloat(
      MRJobConfig.MAX_ALLOWED_FETCH_FAILURES_FRACTION,
      MRJobConfig.DEFAULT_MAX_ALLOWED_FETCH_FAILURES_FRACTION);
  this.maxFetchFailuresNotifications = conf.getInt(
      MRJobConfig.MAX_FETCH_FAILURES_NOTIFICATIONS,
      MRJobConfig.DEFAULT_MAX_FETCH_FAILURES_NOTIFICATIONS);
}
项目:aliyun-oss-hadoop-fs    文件:JobImpl.java   
/**
 * Builds the ApplicationMaster's in-memory representation of a MapReduce job.
 * All collaborators (event handler, listeners, committer, credentials, clock,
 * metrics, app context) are injected by the caller; the remaining state — job
 * name, queue name, job ACLs, and fetch-failure thresholds — is read from
 * {@code conf}. The state machine is created near the end so that the fields
 * it may consult are already assigned.
 *
 * @param jobId            YARN job id; also converted to the old-API id below
 * @param conf             job configuration; a {@link JobConf} is built from it
 * @param completedTasksFromPreviousRun tasks recovered from a prior AM attempt
 *                         (not validated here; may be null for a fresh run —
 *                         TODO confirm at call sites)
 * @param forcedState      if non-null, recorded as an externally imposed job
 *                         state (NOTE(review): presumably a recovery/error
 *                         path — confirm against callers)
 * @param forcedDiagnostic optional diagnostic message; ignored when null
 */
public JobImpl(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler,
    TaskAttemptListener taskAttemptListener,
    JobTokenSecretManager jobTokenSecretManager,
    Credentials jobCredentials, Clock clock,
    Map<TaskId, TaskInfo> completedTasksFromPreviousRun, MRAppMetrics metrics,
    OutputCommitter committer, boolean newApiCommitter, String userName,
    long appSubmitTime, List<AMInfo> amInfos, AppContext appContext,
    JobStateInternal forcedState, String forcedDiagnostic) {
  this.applicationAttemptId = applicationAttemptId;
  this.jobId = jobId;
  // Fall back to a placeholder so the display name is never null.
  this.jobName = conf.get(JobContext.JOB_NAME, "<missing job name>");
  this.conf = new JobConf(conf);
  this.metrics = metrics;
  this.clock = clock;
  this.completedTasksFromPreviousRun = completedTasksFromPreviousRun;
  this.amInfos = amInfos;
  this.appContext = appContext;
  this.userName = userName;
  this.queueName = conf.get(MRJobConfig.QUEUE_NAME, "default");
  this.appSubmitTime = appSubmitTime;
  // Bridge to the pre-YARN (org.apache.hadoop.mapred) job id.
  this.oldJobId = TypeConverter.fromYarn(jobId);
  this.committer = committer;
  this.newApiCommitter = newApiCommitter;

  this.taskAttemptListener = taskAttemptListener;
  this.eventHandler = eventHandler;
  // One ReentrantReadWriteLock supplies both halves, so readers and the
  // writer coordinate on the same job state.
  ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
  this.readLock = readWriteLock.readLock();
  this.writeLock = readWriteLock.writeLock();

  this.jobCredentials = jobCredentials;
  this.jobTokenSecretManager = jobTokenSecretManager;

  // The ACL manager must exist before the job ACLs are derived from conf.
  this.aclsManager = new JobACLsManager(conf);
  // JVM process owner — distinct from the userName parameter stored above.
  this.reporterUserName = System.getProperty("user.name");
  this.jobACLs = aclsManager.constructJobACLs(conf);

  // Single-threaded scheduled executor with named daemon threads; per the
  // name format it backs the job-fail-wait timeout monitor.
  ThreadFactory threadFactory = new ThreadFactoryBuilder()
    .setNameFormat("Job Fail Wait Timeout Monitor #%d")
    .setDaemon(true)
    .build();
  this.executor = new ScheduledThreadPoolExecutor(1, threadFactory);

  // This "this leak" is okay because the retained pointer is in an
  //  instance variable.
  stateMachine = stateMachineFactory.make(this);
  this.forcedState  = forcedState;
  if(forcedDiagnostic != null) {
    this.diagnostics.add(forcedDiagnostic);
  }

  // Shuffle fetch-failure thresholds, read from conf with MRJobConfig
  // defaults.
  this.maxAllowedFetchFailuresFraction = conf.getFloat(
      MRJobConfig.MAX_ALLOWED_FETCH_FAILURES_FRACTION,
      MRJobConfig.DEFAULT_MAX_ALLOWED_FETCH_FAILURES_FRACTION);
  this.maxFetchFailuresNotifications = conf.getInt(
      MRJobConfig.MAX_FETCH_FAILURES_NOTIFICATIONS,
      MRJobConfig.DEFAULT_MAX_FETCH_FAILURES_NOTIFICATIONS);
}
项目:big-c    文件:JobImpl.java   
/**
 * Builds the ApplicationMaster's in-memory representation of a MapReduce job.
 * All collaborators (event handler, listeners, committer, credentials, clock,
 * metrics, app context) are injected by the caller; the remaining state — job
 * name, queue name, job ACLs, and fetch-failure thresholds — is read from
 * {@code conf}. The state machine is created near the end so that the fields
 * it may consult are already assigned.
 *
 * @param jobId            YARN job id; also converted to the old-API id below
 * @param conf             job configuration; a {@link JobConf} is built from it
 * @param completedTasksFromPreviousRun tasks recovered from a prior AM attempt
 *                         (not validated here; may be null for a fresh run —
 *                         TODO confirm at call sites)
 * @param forcedState      if non-null, recorded as an externally imposed job
 *                         state (NOTE(review): presumably a recovery/error
 *                         path — confirm against callers)
 * @param forcedDiagnostic optional diagnostic message; ignored when null
 */
public JobImpl(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler,
    TaskAttemptListener taskAttemptListener,
    JobTokenSecretManager jobTokenSecretManager,
    Credentials jobCredentials, Clock clock,
    Map<TaskId, TaskInfo> completedTasksFromPreviousRun, MRAppMetrics metrics,
    OutputCommitter committer, boolean newApiCommitter, String userName,
    long appSubmitTime, List<AMInfo> amInfos, AppContext appContext,
    JobStateInternal forcedState, String forcedDiagnostic) {
  this.applicationAttemptId = applicationAttemptId;
  this.jobId = jobId;
  // Fall back to a placeholder so the display name is never null.
  this.jobName = conf.get(JobContext.JOB_NAME, "<missing job name>");
  this.conf = new JobConf(conf);
  this.metrics = metrics;
  this.clock = clock;
  this.completedTasksFromPreviousRun = completedTasksFromPreviousRun;
  this.amInfos = amInfos;
  this.appContext = appContext;
  this.userName = userName;
  this.queueName = conf.get(MRJobConfig.QUEUE_NAME, "default");
  this.appSubmitTime = appSubmitTime;
  // Bridge to the pre-YARN (org.apache.hadoop.mapred) job id.
  this.oldJobId = TypeConverter.fromYarn(jobId);
  this.committer = committer;
  this.newApiCommitter = newApiCommitter;

  this.taskAttemptListener = taskAttemptListener;
  this.eventHandler = eventHandler;
  // One ReentrantReadWriteLock supplies both halves, so readers and the
  // writer coordinate on the same job state.
  ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
  this.readLock = readWriteLock.readLock();
  this.writeLock = readWriteLock.writeLock();

  this.jobCredentials = jobCredentials;
  this.jobTokenSecretManager = jobTokenSecretManager;

  // The ACL manager must exist before the job ACLs are derived from conf.
  this.aclsManager = new JobACLsManager(conf);
  // JVM process owner — distinct from the userName parameter stored above.
  this.username = System.getProperty("user.name");
  this.jobACLs = aclsManager.constructJobACLs(conf);

  // Single-threaded scheduled executor with named daemon threads; per the
  // name format it backs the job-fail-wait timeout monitor.
  ThreadFactory threadFactory = new ThreadFactoryBuilder()
    .setNameFormat("Job Fail Wait Timeout Monitor #%d")
    .setDaemon(true)
    .build();
  this.executor = new ScheduledThreadPoolExecutor(1, threadFactory);

  // This "this leak" is okay because the retained pointer is in an
  //  instance variable.
  stateMachine = stateMachineFactory.make(this);
  this.forcedState  = forcedState;
  if(forcedDiagnostic != null) {
    this.diagnostics.add(forcedDiagnostic);
  }

  // Shuffle fetch-failure thresholds, read from conf with MRJobConfig
  // defaults.
  this.maxAllowedFetchFailuresFraction = conf.getFloat(
      MRJobConfig.MAX_ALLOWED_FETCH_FAILURES_FRACTION,
      MRJobConfig.DEFAULT_MAX_ALLOWED_FETCH_FAILURES_FRACTION);
  this.maxFetchFailuresNotifications = conf.getInt(
      MRJobConfig.MAX_FETCH_FAILURES_NOTIFICATIONS,
      MRJobConfig.DEFAULT_MAX_FETCH_FAILURES_NOTIFICATIONS);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:JobImpl.java   
/**
 * Builds the ApplicationMaster's in-memory representation of a MapReduce job.
 * All collaborators (event handler, listeners, committer, credentials, clock,
 * metrics, app context) are injected by the caller; the remaining state — job
 * name, queue name, job ACLs, and fetch-failure thresholds — is read from
 * {@code conf}. The state machine is created near the end so that the fields
 * it may consult are already assigned.
 *
 * @param jobId            YARN job id; also converted to the old-API id below
 * @param conf             job configuration; a {@link JobConf} is built from it
 * @param completedTasksFromPreviousRun tasks recovered from a prior AM attempt
 *                         (not validated here; may be null for a fresh run —
 *                         TODO confirm at call sites)
 * @param forcedState      if non-null, recorded as an externally imposed job
 *                         state (NOTE(review): presumably a recovery/error
 *                         path — confirm against callers)
 * @param forcedDiagnostic optional diagnostic message; ignored when null
 */
public JobImpl(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler,
    TaskAttemptListener taskAttemptListener,
    JobTokenSecretManager jobTokenSecretManager,
    Credentials jobCredentials, Clock clock,
    Map<TaskId, TaskInfo> completedTasksFromPreviousRun, MRAppMetrics metrics,
    OutputCommitter committer, boolean newApiCommitter, String userName,
    long appSubmitTime, List<AMInfo> amInfos, AppContext appContext,
    JobStateInternal forcedState, String forcedDiagnostic) {
  this.applicationAttemptId = applicationAttemptId;
  this.jobId = jobId;
  // Fall back to a placeholder so the display name is never null.
  this.jobName = conf.get(JobContext.JOB_NAME, "<missing job name>");
  this.conf = new JobConf(conf);
  this.metrics = metrics;
  this.clock = clock;
  this.completedTasksFromPreviousRun = completedTasksFromPreviousRun;
  this.amInfos = amInfos;
  this.appContext = appContext;
  this.userName = userName;
  this.queueName = conf.get(MRJobConfig.QUEUE_NAME, "default");
  this.appSubmitTime = appSubmitTime;
  // Bridge to the pre-YARN (org.apache.hadoop.mapred) job id.
  this.oldJobId = TypeConverter.fromYarn(jobId);
  this.committer = committer;
  this.newApiCommitter = newApiCommitter;

  this.taskAttemptListener = taskAttemptListener;
  this.eventHandler = eventHandler;
  // One ReentrantReadWriteLock supplies both halves, so readers and the
  // writer coordinate on the same job state.
  ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
  this.readLock = readWriteLock.readLock();
  this.writeLock = readWriteLock.writeLock();

  this.jobCredentials = jobCredentials;
  this.jobTokenSecretManager = jobTokenSecretManager;

  // The ACL manager must exist before the job ACLs are derived from conf.
  this.aclsManager = new JobACLsManager(conf);
  // JVM process owner — distinct from the userName parameter stored above.
  this.username = System.getProperty("user.name");
  this.jobACLs = aclsManager.constructJobACLs(conf);

  // Single-threaded scheduled executor with named daemon threads; per the
  // name format it backs the job-fail-wait timeout monitor.
  ThreadFactory threadFactory = new ThreadFactoryBuilder()
    .setNameFormat("Job Fail Wait Timeout Monitor #%d")
    .setDaemon(true)
    .build();
  this.executor = new ScheduledThreadPoolExecutor(1, threadFactory);

  // This "this leak" is okay because the retained pointer is in an
  //  instance variable.
  stateMachine = stateMachineFactory.make(this);
  this.forcedState  = forcedState;
  if(forcedDiagnostic != null) {
    this.diagnostics.add(forcedDiagnostic);
  }

  // Shuffle fetch-failure thresholds, read from conf with MRJobConfig
  // defaults.
  this.maxAllowedFetchFailuresFraction = conf.getFloat(
      MRJobConfig.MAX_ALLOWED_FETCH_FAILURES_FRACTION,
      MRJobConfig.DEFAULT_MAX_ALLOWED_FETCH_FAILURES_FRACTION);
  this.maxFetchFailuresNotifications = conf.getInt(
      MRJobConfig.MAX_FETCH_FAILURES_NOTIFICATIONS,
      MRJobConfig.DEFAULT_MAX_FETCH_FAILURES_NOTIFICATIONS);
}
项目:hadoop-plus    文件:JobImpl.java   
/**
 * Builds the ApplicationMaster's in-memory representation of a MapReduce job.
 * All collaborators (event handler, listeners, committer, credentials, clock,
 * metrics, app context) are injected by the caller; the remaining state — job
 * name, queue name, and job ACLs — is read from {@code conf}. The state
 * machine is created last so that the fields it may consult are already
 * assigned. (This fork's variant has no fail-wait executor and no
 * fetch-failure threshold configuration.)
 *
 * @param jobId            YARN job id; also converted to the old-API id below
 * @param conf             job configuration; a {@link JobConf} is built from it
 * @param completedTasksFromPreviousRun tasks recovered from a prior AM attempt
 *                         (not validated here; may be null for a fresh run —
 *                         TODO confirm at call sites)
 * @param forcedState      if non-null, recorded as an externally imposed job
 *                         state (NOTE(review): presumably a recovery/error
 *                         path — confirm against callers)
 * @param forcedDiagnostic optional diagnostic message; ignored when null
 */
public JobImpl(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler,
    TaskAttemptListener taskAttemptListener,
    JobTokenSecretManager jobTokenSecretManager,
    Credentials jobCredentials, Clock clock,
    Map<TaskId, TaskInfo> completedTasksFromPreviousRun, MRAppMetrics metrics,
    OutputCommitter committer, boolean newApiCommitter, String userName,
    long appSubmitTime, List<AMInfo> amInfos, AppContext appContext,
    JobStateInternal forcedState, String forcedDiagnostic) {
  this.applicationAttemptId = applicationAttemptId;
  this.jobId = jobId;
  // Fall back to a placeholder so the display name is never null.
  this.jobName = conf.get(JobContext.JOB_NAME, "<missing job name>");
  this.conf = new JobConf(conf);
  this.metrics = metrics;
  this.clock = clock;
  this.completedTasksFromPreviousRun = completedTasksFromPreviousRun;
  this.amInfos = amInfos;
  this.appContext = appContext;
  this.userName = userName;
  this.queueName = conf.get(MRJobConfig.QUEUE_NAME, "default");
  this.appSubmitTime = appSubmitTime;
  // Bridge to the pre-YARN (org.apache.hadoop.mapred) job id.
  this.oldJobId = TypeConverter.fromYarn(jobId);
  this.committer = committer;
  this.newApiCommitter = newApiCommitter;

  this.taskAttemptListener = taskAttemptListener;
  this.eventHandler = eventHandler;
  // One ReentrantReadWriteLock supplies both halves, so readers and the
  // writer coordinate on the same job state.
  ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
  this.readLock = readWriteLock.readLock();
  this.writeLock = readWriteLock.writeLock();

  this.jobCredentials = jobCredentials;
  this.jobTokenSecretManager = jobTokenSecretManager;

  // The ACL manager must exist before the job ACLs are derived from conf.
  this.aclsManager = new JobACLsManager(conf);
  // JVM process owner — distinct from the userName parameter stored above.
  this.username = System.getProperty("user.name");
  this.jobACLs = aclsManager.constructJobACLs(conf);
  // This "this leak" is okay because the retained pointer is in an
  //  instance variable.
  stateMachine = stateMachineFactory.make(this);
  this.forcedState  = forcedState;
  if(forcedDiagnostic != null) {
    this.diagnostics.add(forcedDiagnostic);
  }
}
项目:FlexMap    文件:JobImpl.java   
/**
 * Builds the ApplicationMaster's in-memory representation of a MapReduce job.
 * All collaborators (event handler, listeners, committer, credentials, clock,
 * metrics, app context) are injected by the caller; the remaining state — job
 * name, queue name, job ACLs, and fetch-failure thresholds — is read from
 * {@code conf}. The state machine is created near the end so that the fields
 * it may consult are already assigned.
 *
 * @param jobId            YARN job id; also converted to the old-API id below
 * @param conf             job configuration; a {@link JobConf} is built from it
 * @param completedTasksFromPreviousRun tasks recovered from a prior AM attempt
 *                         (not validated here; may be null for a fresh run —
 *                         TODO confirm at call sites)
 * @param forcedState      if non-null, recorded as an externally imposed job
 *                         state (NOTE(review): presumably a recovery/error
 *                         path — confirm against callers)
 * @param forcedDiagnostic optional diagnostic message; ignored when null
 */
public JobImpl(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler,
    TaskAttemptListener taskAttemptListener,
    JobTokenSecretManager jobTokenSecretManager,
    Credentials jobCredentials, Clock clock,
    Map<TaskId, TaskInfo> completedTasksFromPreviousRun, MRAppMetrics metrics,
    OutputCommitter committer, boolean newApiCommitter, String userName,
    long appSubmitTime, List<AMInfo> amInfos, AppContext appContext,
    JobStateInternal forcedState, String forcedDiagnostic) {
  this.applicationAttemptId = applicationAttemptId;
  this.jobId = jobId;
  // Fall back to a placeholder so the display name is never null.
  this.jobName = conf.get(JobContext.JOB_NAME, "<missing job name>");
  this.conf = new JobConf(conf);
  this.metrics = metrics;
  this.clock = clock;
  this.completedTasksFromPreviousRun = completedTasksFromPreviousRun;
  this.amInfos = amInfos;
  this.appContext = appContext;
  this.userName = userName;
  this.queueName = conf.get(MRJobConfig.QUEUE_NAME, "default");
  this.appSubmitTime = appSubmitTime;
  // Bridge to the pre-YARN (org.apache.hadoop.mapred) job id.
  this.oldJobId = TypeConverter.fromYarn(jobId);
  this.committer = committer;
  this.newApiCommitter = newApiCommitter;


  this.taskAttemptListener = taskAttemptListener;
  this.eventHandler = eventHandler;
  // One ReentrantReadWriteLock supplies both halves, so readers and the
  // writer coordinate on the same job state.
  ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
  this.readLock = readWriteLock.readLock();
  this.writeLock = readWriteLock.writeLock();

  this.jobCredentials = jobCredentials;
  this.jobTokenSecretManager = jobTokenSecretManager;

  // The ACL manager must exist before the job ACLs are derived from conf.
  this.aclsManager = new JobACLsManager(conf);
  // JVM process owner — distinct from the userName parameter stored above.
  this.username = System.getProperty("user.name");
  this.jobACLs = aclsManager.constructJobACLs(conf);

  // Single-threaded scheduled executor with named daemon threads; per the
  // name format it backs the job-fail-wait timeout monitor.
  ThreadFactory threadFactory = new ThreadFactoryBuilder()
    .setNameFormat("Job Fail Wait Timeout Monitor #%d")
    .setDaemon(true)
    .build();
  this.executor = new ScheduledThreadPoolExecutor(1, threadFactory);

  // This "this leak" is okay because the retained pointer is in an
  //  instance variable.
  stateMachine = stateMachineFactory.make(this);
  this.forcedState  = forcedState;
  if(forcedDiagnostic != null) {
    this.diagnostics.add(forcedDiagnostic);
  }

  // Shuffle fetch-failure thresholds, read from conf with MRJobConfig
  // defaults.
  this.maxAllowedFetchFailuresFraction = conf.getFloat(
      MRJobConfig.MAX_ALLOWED_FETCH_FAILURES_FRACTION,
      MRJobConfig.DEFAULT_MAX_ALLOWED_FETCH_FAILURES_FRACTION);
  this.maxFetchFailuresNotifications = conf.getInt(
      MRJobConfig.MAX_FETCH_FAILURES_NOTIFICATIONS,
      MRJobConfig.DEFAULT_MAX_FETCH_FAILURES_NOTIFICATIONS);
}