Java 类org.apache.hadoop.mapred.JobPriority 实例源码

项目:hadoop    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:aliyun-oss-hadoop-fs    文件:TypeConverter.java   
public static int toYarnApplicationPriority(String priority) {
  // Translates a JobPriority name into YARN's numeric application
  // priority (VERY_HIGH=5 down to DEFAULT=0); any other enum constant
  // is rejected with IllegalArgumentException.
  final JobPriority jobPriority = JobPriority.valueOf(priority);
  if (jobPriority == JobPriority.VERY_HIGH) {
    return 5;
  }
  if (jobPriority == JobPriority.HIGH) {
    return 4;
  }
  if (jobPriority == JobPriority.NORMAL) {
    return 3;
  }
  if (jobPriority == JobPriority.LOW) {
    return 2;
  }
  if (jobPriority == JobPriority.VERY_LOW) {
    return 1;
  }
  if (jobPriority == JobPriority.DEFAULT) {
    return 0;
  }
  throw new IllegalArgumentException("Unrecognized priority: " + priority);
}
项目:aliyun-oss-hadoop-fs    文件:TypeConverter.java   
private static JobPriority fromYarnPriority(int priority) {
  // Maps YARN's numeric priority (0..5) back onto the JobPriority enum;
  // anything outside that range is reported as UNDEFINED_PRIORITY.
  final JobPriority[] byLevel = {
      JobPriority.DEFAULT, JobPriority.VERY_LOW, JobPriority.LOW,
      JobPriority.NORMAL, JobPriority.HIGH, JobPriority.VERY_HIGH };
  if (priority >= 0 && priority < byLevel.length) {
    return byLevel[priority];
  }
  return JobPriority.UNDEFINED_PRIORITY;
}
项目:aliyun-oss-hadoop-fs    文件:TypeConverter.java   
public static org.apache.hadoop.mapreduce.JobPriority
    fromYarnApplicationPriority(int priority) {
  // Maps YARN's numeric application priority (0..5) onto the mapreduce
  // JobPriority enum; out-of-range values become UNDEFINED_PRIORITY.
  final org.apache.hadoop.mapreduce.JobPriority[] byLevel = {
      org.apache.hadoop.mapreduce.JobPriority.DEFAULT,
      org.apache.hadoop.mapreduce.JobPriority.VERY_LOW,
      org.apache.hadoop.mapreduce.JobPriority.LOW,
      org.apache.hadoop.mapreduce.JobPriority.NORMAL,
      org.apache.hadoop.mapreduce.JobPriority.HIGH,
      org.apache.hadoop.mapreduce.JobPriority.VERY_HIGH };
  if (priority >= 0 && priority < byLevel.length) {
    return byLevel[priority];
  }
  return org.apache.hadoop.mapreduce.JobPriority.UNDEFINED_PRIORITY;
}
项目:aliyun-oss-hadoop-fs    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:big-c    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hadoop-2.6.0-cdh5.4.3    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hadoop-plus    文件:TypeConverter.java   
public static JobStatus fromYarn(ApplicationReport application,
    String jobFile) {
  // Builds a mapreduce JobStatus from a YARN ApplicationReport.
  // Progress fields are zeroed and priority is hard-coded to NORMAL.
  String trackingUrl = application.getTrackingUrl();
  trackingUrl = trackingUrl == null ? "" : trackingUrl;
  JobStatus jobStatus =
    new JobStatus(
        TypeConverter.fromYarn(application.getApplicationId()),
        0.0f, 0.0f, 0.0f, 0.0f,
        TypeConverter.fromYarn(application.getYarnApplicationState(),
            application.getFinalApplicationStatus()),
        org.apache.hadoop.mapreduce.JobPriority.NORMAL,
        application.getUser(), application.getName(),
        application.getQueue(), jobFile, trackingUrl, false
    );
  jobStatus.setSchedulingInfo(trackingUrl); // Set AM tracking url
  jobStatus.setStartTime(application.getStartTime());
  jobStatus.setFailureInfo(application.getDiagnostics());
  // FIX: the usage report may be null; the unconditional chained
  // dereference risked a NullPointerException. Guard it, as the other
  // variants of this converter in this codebase already do.
  ApplicationResourceUsageReport resourceUsageReport =
      application.getApplicationResourceUsageReport();
  if (resourceUsageReport != null) {
    jobStatus.setNeededMem(
        resourceUsageReport.getNeededResources().getMemory());
    jobStatus.setNumReservedSlots(
        resourceUsageReport.getNumReservedContainers());
    jobStatus.setNumUsedSlots(resourceUsageReport.getNumUsedContainers());
    jobStatus.setReservedMem(
        resourceUsageReport.getReservedResources().getMemory());
    jobStatus.setUsedMem(resourceUsageReport.getUsedResources().getMemory());
  }
  return jobStatus;
}
项目:hadoop-plus    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hops    文件:TypeConverter.java   
public static int toYarnApplicationPriority(String priority) {
  // Translates a JobPriority name into YARN's numeric application
  // priority (VERY_HIGH=5 down to DEFAULT=0); any other enum constant
  // is rejected with IllegalArgumentException.
  final JobPriority jobPriority = JobPriority.valueOf(priority);
  if (jobPriority == JobPriority.VERY_HIGH) {
    return 5;
  }
  if (jobPriority == JobPriority.HIGH) {
    return 4;
  }
  if (jobPriority == JobPriority.NORMAL) {
    return 3;
  }
  if (jobPriority == JobPriority.LOW) {
    return 2;
  }
  if (jobPriority == JobPriority.VERY_LOW) {
    return 1;
  }
  if (jobPriority == JobPriority.DEFAULT) {
    return 0;
  }
  throw new IllegalArgumentException("Unrecognized priority: " + priority);
}
项目:hops    文件:TypeConverter.java   
private static JobPriority fromYarnPriority(int priority) {
  // Maps YARN's numeric priority (0..5) back onto the JobPriority enum;
  // anything outside that range is reported as UNDEFINED_PRIORITY.
  final JobPriority[] byLevel = {
      JobPriority.DEFAULT, JobPriority.VERY_LOW, JobPriority.LOW,
      JobPriority.NORMAL, JobPriority.HIGH, JobPriority.VERY_HIGH };
  if (priority >= 0 && priority < byLevel.length) {
    return byLevel[priority];
  }
  return JobPriority.UNDEFINED_PRIORITY;
}
项目:hops    文件:TypeConverter.java   
public static org.apache.hadoop.mapreduce.JobPriority
    fromYarnApplicationPriority(int priority) {
  // Maps YARN's numeric application priority (0..5) onto the mapreduce
  // JobPriority enum; out-of-range values become UNDEFINED_PRIORITY.
  final org.apache.hadoop.mapreduce.JobPriority[] byLevel = {
      org.apache.hadoop.mapreduce.JobPriority.DEFAULT,
      org.apache.hadoop.mapreduce.JobPriority.VERY_LOW,
      org.apache.hadoop.mapreduce.JobPriority.LOW,
      org.apache.hadoop.mapreduce.JobPriority.NORMAL,
      org.apache.hadoop.mapreduce.JobPriority.HIGH,
      org.apache.hadoop.mapreduce.JobPriority.VERY_HIGH };
  if (priority >= 0 && priority < byLevel.length) {
    return byLevel[priority];
  }
  return org.apache.hadoop.mapreduce.JobPriority.UNDEFINED_PRIORITY;
}
项目:hops    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:jumbune    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hadoop-TCP    文件:TypeConverter.java   
public static JobStatus fromYarn(ApplicationReport application,
    String jobFile) {
  // Builds a mapreduce JobStatus from a YARN ApplicationReport.
  // Progress fields are zeroed and priority is hard-coded to NORMAL.
  String trackingUrl = application.getTrackingUrl();
  trackingUrl = trackingUrl == null ? "" : trackingUrl;
  JobStatus jobStatus =
    new JobStatus(
        TypeConverter.fromYarn(application.getApplicationId()),
        0.0f, 0.0f, 0.0f, 0.0f,
        TypeConverter.fromYarn(application.getYarnApplicationState(),
            application.getFinalApplicationStatus()),
        org.apache.hadoop.mapreduce.JobPriority.NORMAL,
        application.getUser(), application.getName(),
        application.getQueue(), jobFile, trackingUrl, false
    );
  jobStatus.setSchedulingInfo(trackingUrl); // Set AM tracking url
  jobStatus.setStartTime(application.getStartTime());
  jobStatus.setFailureInfo(application.getDiagnostics());
  // FIX: the usage report may be null; the unconditional chained
  // dereference risked a NullPointerException. Guard it, as the other
  // variants of this converter in this codebase already do.
  ApplicationResourceUsageReport resourceUsageReport =
      application.getApplicationResourceUsageReport();
  if (resourceUsageReport != null) {
    jobStatus.setNeededMem(
        resourceUsageReport.getNeededResources().getMemory());
    jobStatus.setNumReservedSlots(
        resourceUsageReport.getNumReservedContainers());
    jobStatus.setNumUsedSlots(resourceUsageReport.getNumUsedContainers());
    jobStatus.setReservedMem(
        resourceUsageReport.getReservedResources().getMemory());
    jobStatus.setUsedMem(resourceUsageReport.getUsedResources().getMemory());
  }
  return jobStatus;
}
项目:hadoop-TCP    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hadoop-on-lustre    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hardfs    文件:TypeConverter.java   
public static JobStatus fromYarn(ApplicationReport application,
    String jobFile) {
  // Builds a mapreduce JobStatus from a YARN ApplicationReport.
  // Progress fields are zeroed and priority is hard-coded to NORMAL.
  String trackingUrl = application.getTrackingUrl();
  trackingUrl = trackingUrl == null ? "" : trackingUrl;
  JobStatus jobStatus =
    new JobStatus(
        TypeConverter.fromYarn(application.getApplicationId()),
        0.0f, 0.0f, 0.0f, 0.0f,
        TypeConverter.fromYarn(application.getYarnApplicationState(),
            application.getFinalApplicationStatus()),
        org.apache.hadoop.mapreduce.JobPriority.NORMAL,
        application.getUser(), application.getName(),
        application.getQueue(), jobFile, trackingUrl, false
    );
  jobStatus.setSchedulingInfo(trackingUrl); // Set AM tracking url
  jobStatus.setStartTime(application.getStartTime());
  jobStatus.setFailureInfo(application.getDiagnostics());
  // FIX: the usage report may be null; the unconditional chained
  // dereference risked a NullPointerException. Guard it, as the other
  // variants of this converter in this codebase already do.
  ApplicationResourceUsageReport resourceUsageReport =
      application.getApplicationResourceUsageReport();
  if (resourceUsageReport != null) {
    jobStatus.setNeededMem(
        resourceUsageReport.getNeededResources().getMemory());
    jobStatus.setNumReservedSlots(
        resourceUsageReport.getNumReservedContainers());
    jobStatus.setNumUsedSlots(resourceUsageReport.getNumUsedContainers());
    jobStatus.setReservedMem(
        resourceUsageReport.getReservedResources().getMemory());
    jobStatus.setUsedMem(resourceUsageReport.getUsedResources().getMemory());
  }
  return jobStatus;
}
项目:hardfs    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hadoop-on-lustre2    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:mapreduce-fork    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hortonworks-extension    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hortonworks-extension    文件:Job20LineHistoryEventEmitter.java   
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
    HistoryEventEmitter thatg) {
  // Emits a JobPriorityChangeEvent when this parsed history line carries a
  // JOB_PRIORITY field; returns null otherwise.
  // FIX: test jobIDName for null before parsing it, instead of calling
  // JobID.forName first and checking the raw name afterwards.
  if (jobIDName == null) {
    return null;
  }
  JobID jobID = JobID.forName(jobIDName);

  String priority = line.get("JOB_PRIORITY");
  if (priority == null) {
    return null;
  }
  return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
项目:hadoop    文件:TestEvents.java   
/**
 * Checks that JobPriorityChangeEvent preserves the job id and priority
 * it was constructed with.
 *
 * @throws Exception
 */
@Test(timeout = 10000)
public void testJobPriorityChange() throws Exception {
  org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
  JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
      JobPriority.LOW);
  // FIX: JUnit's assertEquals signature is (expected, actual); the
  // arguments were reversed, which yields misleading failure messages.
  assertEquals(jid.toString(), test.getJobId().toString());
  assertEquals(JobPriority.LOW, test.getPriority());
}
项目:hadoop    文件:TypeConverter.java   
public static JobStatus fromYarn(JobReport jobreport, String trackingUrl) {
  // Builds a mapred JobStatus from a YARN JobReport; the priority is
  // fixed at NORMAL in this variant.
  final JobPriority jobPriority = JobPriority.NORMAL;
  JobStatus jobStatus = new org.apache.hadoop.mapred.JobStatus(
      fromYarn(jobreport.getJobId()),
      jobreport.getSetupProgress(),
      jobreport.getMapProgress(),
      jobreport.getReduceProgress(),
      jobreport.getCleanupProgress(),
      fromYarn(jobreport.getJobState()),
      jobPriority,
      jobreport.getUser(),
      jobreport.getJobName(),
      jobreport.getJobFile(),
      trackingUrl,
      jobreport.isUber());
  jobStatus.setStartTime(jobreport.getStartTime());
  jobStatus.setFinishTime(jobreport.getFinishTime());
  jobStatus.setFailureInfo(jobreport.getDiagnostics());
  return jobStatus;
}
项目:hadoop    文件:TypeConverter.java   
public static JobStatus fromYarn(ApplicationReport application,
    String jobFile) {
  // Translates a YARN ApplicationReport into a mapreduce JobStatus.
  // Progress fields are zeroed and priority is hard-coded to NORMAL.
  final String rawUrl = application.getTrackingUrl();
  final String trackingUrl = (rawUrl == null) ? "" : rawUrl;
  JobStatus jobStatus = new JobStatus(
      TypeConverter.fromYarn(application.getApplicationId()),
      0.0f, 0.0f, 0.0f, 0.0f,
      TypeConverter.fromYarn(application.getYarnApplicationState(),
          application.getFinalApplicationStatus()),
      org.apache.hadoop.mapreduce.JobPriority.NORMAL,
      application.getUser(), application.getName(),
      application.getQueue(), jobFile, trackingUrl, false);
  // The AM tracking url doubles as the scheduling-info string.
  jobStatus.setSchedulingInfo(trackingUrl);
  jobStatus.setStartTime(application.getStartTime());
  jobStatus.setFinishTime(application.getFinishTime());
  jobStatus.setFailureInfo(application.getDiagnostics());
  ApplicationResourceUsageReport usage =
      application.getApplicationResourceUsageReport();
  // The usage report may be absent; copy its figures only when present.
  if (usage != null) {
    jobStatus.setNeededMem(usage.getNeededResources().getMemory());
    jobStatus.setNumReservedSlots(usage.getNumReservedContainers());
    jobStatus.setNumUsedSlots(usage.getNumUsedContainers());
    jobStatus.setReservedMem(usage.getReservedResources().getMemory());
    jobStatus.setUsedMem(usage.getUsedResources().getMemory());
  }
  return jobStatus;
}
项目:hadoop    文件:JobHistoryParser.java   
/**
 * Creates an empty job info object; the fields are filled in later as
 * the history file is parsed.
 */
public JobInfo() {
  // -1 marks timestamps that have not been observed yet.
  submitTime = -1;
  launchTime = -1;
  finishTime = -1;
  // Task counters all start from zero.
  totalMaps = 0;
  totalReduces = 0;
  failedMaps = 0;
  failedReduces = 0;
  finishedMaps = 0;
  finishedReduces = 0;
  // Identity strings default to empty rather than null.
  username = "";
  jobname = "";
  jobConfPath = "";
  jobQueueName = "";
  tasksMap = new HashMap<TaskID, TaskInfo>();
  completedTaskAttemptsMap = new HashMap<TaskAttemptID, TaskAttemptInfo>();
  jobACLs = new HashMap<JobACL, AccessControlList>();
  priority = JobPriority.NORMAL;
}
项目:aliyun-oss-hadoop-fs    文件:TestEvents.java   
/**
 * Checks that JobPriorityChangeEvent preserves the job id and priority
 * it was constructed with.
 *
 * @throws Exception
 */
@Test(timeout = 10000)
public void testJobPriorityChange() throws Exception {
  org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
  JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
      JobPriority.LOW);
  // FIX: JUnit's assertEquals signature is (expected, actual); the
  // arguments were reversed, which yields misleading failure messages.
  assertEquals(jid.toString(), test.getJobId().toString());
  assertEquals(JobPriority.LOW, test.getPriority());
}
项目:aliyun-oss-hadoop-fs    文件:TypeConverter.java   
public static JobStatus fromYarn(JobReport jobreport, String trackingUrl) {
  // Derives the mapred priority from the report's YARN priority,
  // treating a missing priority as DEFAULT.
  JobPriority jobPriority;
  if (jobreport.getJobPriority() == null) {
    jobPriority = JobPriority.DEFAULT;
  } else {
    jobPriority = fromYarnPriority(jobreport.getJobPriority().getPriority());
  }
  JobStatus jobStatus = new org.apache.hadoop.mapred.JobStatus(
      fromYarn(jobreport.getJobId()),
      jobreport.getSetupProgress(),
      jobreport.getMapProgress(),
      jobreport.getReduceProgress(),
      jobreport.getCleanupProgress(),
      fromYarn(jobreport.getJobState()),
      jobPriority,
      jobreport.getUser(),
      jobreport.getJobName(),
      jobreport.getJobFile(),
      trackingUrl,
      jobreport.isUber());
  jobStatus.setStartTime(jobreport.getStartTime());
  jobStatus.setFinishTime(jobreport.getFinishTime());
  jobStatus.setFailureInfo(jobreport.getDiagnostics());
  return jobStatus;
}
项目:aliyun-oss-hadoop-fs    文件:JobHistoryParser.java   
/**
 * Creates an empty job info object; the fields are filled in later as
 * the history file is parsed.
 */
public JobInfo() {
  // -1 marks timestamps that have not been observed yet.
  submitTime = -1;
  launchTime = -1;
  finishTime = -1;
  // Task counters all start from zero.
  totalMaps = 0;
  totalReduces = 0;
  failedMaps = 0;
  failedReduces = 0;
  finishedMaps = 0;
  finishedReduces = 0;
  // Identity strings default to empty rather than null.
  username = "";
  jobname = "";
  jobConfPath = "";
  jobQueueName = "";
  tasksMap = new HashMap<TaskID, TaskInfo>();
  completedTaskAttemptsMap = new HashMap<TaskAttemptID, TaskAttemptInfo>();
  jobACLs = new HashMap<JobACL, AccessControlList>();
  priority = JobPriority.NORMAL;
}
项目:big-c    文件:TestEvents.java   
/**
 * Checks that JobPriorityChangeEvent preserves the job id and priority
 * it was constructed with.
 *
 * @throws Exception
 */
@Test(timeout = 10000)
public void testJobPriorityChange() throws Exception {
  org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
  JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
      JobPriority.LOW);
  // FIX: JUnit's assertEquals signature is (expected, actual); the
  // arguments were reversed, which yields misleading failure messages.
  assertEquals(jid.toString(), test.getJobId().toString());
  assertEquals(JobPriority.LOW, test.getPriority());
}
项目:big-c    文件:TypeConverter.java   
public static JobStatus fromYarn(JobReport jobreport, String trackingUrl) {
  // Builds a mapred JobStatus from a YARN JobReport; the priority is
  // fixed at NORMAL in this variant.
  final JobPriority jobPriority = JobPriority.NORMAL;
  JobStatus jobStatus = new org.apache.hadoop.mapred.JobStatus(
      fromYarn(jobreport.getJobId()),
      jobreport.getSetupProgress(),
      jobreport.getMapProgress(),
      jobreport.getReduceProgress(),
      jobreport.getCleanupProgress(),
      fromYarn(jobreport.getJobState()),
      jobPriority,
      jobreport.getUser(),
      jobreport.getJobName(),
      jobreport.getJobFile(),
      trackingUrl,
      jobreport.isUber());
  jobStatus.setStartTime(jobreport.getStartTime());
  jobStatus.setFinishTime(jobreport.getFinishTime());
  jobStatus.setFailureInfo(jobreport.getDiagnostics());
  return jobStatus;
}
项目:big-c    文件:TypeConverter.java   
public static JobStatus fromYarn(ApplicationReport application,
    String jobFile) {
  // Translates a YARN ApplicationReport into a mapreduce JobStatus.
  // Progress fields are zeroed and priority is hard-coded to NORMAL.
  final String rawUrl = application.getTrackingUrl();
  final String trackingUrl = (rawUrl == null) ? "" : rawUrl;
  JobStatus jobStatus = new JobStatus(
      TypeConverter.fromYarn(application.getApplicationId()),
      0.0f, 0.0f, 0.0f, 0.0f,
      TypeConverter.fromYarn(application.getYarnApplicationState(),
          application.getFinalApplicationStatus()),
      org.apache.hadoop.mapreduce.JobPriority.NORMAL,
      application.getUser(), application.getName(),
      application.getQueue(), jobFile, trackingUrl, false);
  // The AM tracking url doubles as the scheduling-info string.
  jobStatus.setSchedulingInfo(trackingUrl);
  jobStatus.setStartTime(application.getStartTime());
  jobStatus.setFinishTime(application.getFinishTime());
  jobStatus.setFailureInfo(application.getDiagnostics());
  ApplicationResourceUsageReport usage =
      application.getApplicationResourceUsageReport();
  // The usage report may be absent; copy its figures only when present.
  if (usage != null) {
    jobStatus.setNeededMem(usage.getNeededResources().getMemory());
    jobStatus.setNumReservedSlots(usage.getNumReservedContainers());
    jobStatus.setNumUsedSlots(usage.getNumUsedContainers());
    jobStatus.setReservedMem(usage.getReservedResources().getMemory());
    jobStatus.setUsedMem(usage.getUsedResources().getMemory());
  }
  return jobStatus;
}
项目:big-c    文件:JobHistoryParser.java   
/**
 * Creates an empty job info object; the fields are filled in later as
 * the history file is parsed.
 */
public JobInfo() {
  // -1 marks timestamps that have not been observed yet.
  submitTime = -1;
  launchTime = -1;
  finishTime = -1;
  // Task counters all start from zero.
  totalMaps = 0;
  totalReduces = 0;
  failedMaps = 0;
  failedReduces = 0;
  finishedMaps = 0;
  finishedReduces = 0;
  // Identity strings default to empty rather than null.
  username = "";
  jobname = "";
  jobConfPath = "";
  jobQueueName = "";
  tasksMap = new HashMap<TaskID, TaskInfo>();
  completedTaskAttemptsMap = new HashMap<TaskAttemptID, TaskAttemptInfo>();
  jobACLs = new HashMap<JobACL, AccessControlList>();
  priority = JobPriority.NORMAL;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TestEvents.java   
/**
 * Checks that JobPriorityChangeEvent preserves the job id and priority
 * it was constructed with.
 *
 * @throws Exception
 */
@Test(timeout = 10000)
public void testJobPriorityChange() throws Exception {
  org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
  JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
      JobPriority.LOW);
  // FIX: JUnit's assertEquals signature is (expected, actual); the
  // arguments were reversed, which yields misleading failure messages.
  assertEquals(jid.toString(), test.getJobId().toString());
  assertEquals(JobPriority.LOW, test.getPriority());
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TypeConverter.java   
public static JobStatus fromYarn(JobReport jobreport, String trackingUrl) {
  // Builds a mapred JobStatus from a YARN JobReport; the priority is
  // fixed at NORMAL in this variant.
  final JobPriority jobPriority = JobPriority.NORMAL;
  JobStatus jobStatus = new org.apache.hadoop.mapred.JobStatus(
      fromYarn(jobreport.getJobId()),
      jobreport.getSetupProgress(),
      jobreport.getMapProgress(),
      jobreport.getReduceProgress(),
      jobreport.getCleanupProgress(),
      fromYarn(jobreport.getJobState()),
      jobPriority,
      jobreport.getUser(),
      jobreport.getJobName(),
      jobreport.getJobFile(),
      trackingUrl,
      jobreport.isUber());
  jobStatus.setStartTime(jobreport.getStartTime());
  jobStatus.setFinishTime(jobreport.getFinishTime());
  jobStatus.setFailureInfo(jobreport.getDiagnostics());
  return jobStatus;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TypeConverter.java   
public static JobStatus fromYarn(ApplicationReport application,
    String jobFile) {
  // Translates a YARN ApplicationReport into a mapreduce JobStatus.
  // Progress fields are zeroed and priority is hard-coded to NORMAL.
  final String rawUrl = application.getTrackingUrl();
  final String trackingUrl = (rawUrl == null) ? "" : rawUrl;
  JobStatus jobStatus = new JobStatus(
      TypeConverter.fromYarn(application.getApplicationId()),
      0.0f, 0.0f, 0.0f, 0.0f,
      TypeConverter.fromYarn(application.getYarnApplicationState(),
          application.getFinalApplicationStatus()),
      org.apache.hadoop.mapreduce.JobPriority.NORMAL,
      application.getUser(), application.getName(),
      application.getQueue(), jobFile, trackingUrl, false);
  // The AM tracking url doubles as the scheduling-info string.
  jobStatus.setSchedulingInfo(trackingUrl);
  jobStatus.setStartTime(application.getStartTime());
  jobStatus.setFinishTime(application.getFinishTime());
  jobStatus.setFailureInfo(application.getDiagnostics());
  ApplicationResourceUsageReport usage =
      application.getApplicationResourceUsageReport();
  // The usage report may be absent; copy its figures only when present.
  if (usage != null) {
    jobStatus.setNeededMem(usage.getNeededResources().getMemory());
    jobStatus.setNumReservedSlots(usage.getNumReservedContainers());
    jobStatus.setNumUsedSlots(usage.getNumUsedContainers());
    jobStatus.setReservedMem(usage.getReservedResources().getMemory());
    jobStatus.setUsedMem(usage.getUsedResources().getMemory());
  }
  return jobStatus;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:JobHistoryParser.java   
/**
 * Creates an empty job info object; the fields are filled in later as
 * the history file is parsed.
 */
public JobInfo() {
  // -1 marks timestamps that have not been observed yet.
  submitTime = -1;
  launchTime = -1;
  finishTime = -1;
  // Task counters all start from zero.
  totalMaps = 0;
  totalReduces = 0;
  failedMaps = 0;
  failedReduces = 0;
  finishedMaps = 0;
  finishedReduces = 0;
  // Identity strings default to empty rather than null.
  username = "";
  jobname = "";
  jobConfPath = "";
  jobQueueName = "";
  tasksMap = new HashMap<TaskID, TaskInfo>();
  completedTaskAttemptsMap = new HashMap<TaskAttemptID, TaskAttemptInfo>();
  jobACLs = new HashMap<JobACL, AccessControlList>();
  priority = JobPriority.NORMAL;
}
项目:hadoop-plus    文件:TestEvents.java   
/**
 * Checks that JobPriorityChangeEvent preserves the job id and priority
 * it was constructed with.
 *
 * @throws Exception
 */
@Test(timeout = 10000)
public void testJobPriorityChange() throws Exception {
  org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
  JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
      JobPriority.LOW);
  // FIX: JUnit's assertEquals signature is (expected, actual); the
  // arguments were reversed, which yields misleading failure messages.
  assertEquals(jid.toString(), test.getJobId().toString());
  assertEquals(JobPriority.LOW, test.getPriority());
}
项目:hadoop-plus    文件:TypeConverter.java   
public static JobStatus fromYarn(JobReport jobreport, String trackingUrl) {
  // Builds a mapred JobStatus from a YARN JobReport; the priority is
  // fixed at NORMAL in this variant, and only diagnostics are copied
  // as extra state.
  final JobPriority jobPriority = JobPriority.NORMAL;
  JobStatus jobStatus = new org.apache.hadoop.mapred.JobStatus(
      fromYarn(jobreport.getJobId()),
      jobreport.getSetupProgress(),
      jobreport.getMapProgress(),
      jobreport.getReduceProgress(),
      jobreport.getCleanupProgress(),
      fromYarn(jobreport.getJobState()),
      jobPriority,
      jobreport.getUser(),
      jobreport.getJobName(),
      jobreport.getJobFile(),
      trackingUrl,
      jobreport.isUber());
  jobStatus.setFailureInfo(jobreport.getDiagnostics());
  return jobStatus;
}