Example source code for the Java class org.springframework.batch.core.JobInstance
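
Before the project snippets, here is a minimal, self-contained sketch (not taken from any of the projects below, and assuming a Spring Batch version in which JobInstance no longer carries JobParameters) of how a JobInstance is constructed and tied to a JobExecution:

import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;

public class JobInstanceSketch {
    public static void main(String[] args) {
        // A JobInstance is identified by an id and a job name.
        JobInstance instance = new JobInstance(1L, "sampleJob");
        // Identifying parameters live on JobParameters, not on the instance itself.
        JobParameters parameters = new JobParametersBuilder()
                .addString("input.file", "data.csv")
                .toJobParameters();
        // Each run of the instance is represented by a JobExecution.
        JobExecution execution = new JobExecution(instance, parameters);
        System.out.println(execution.getJobInstance().getJobName());
    }
}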

Project: spring-batch-support    File: JobServiceImplTest.java
@Disabled
@Test
public void testGetJobs() throws Exception {
    Set<String> jobNames = new HashSet<>();
    jobNames.add("job1");
    jobNames.add("job2");
    jobNames.add("job3");

    Long job1Id = 1L;
    Long job2Id = 2L;
    List<Long> jobExecutions = new ArrayList<>();
    jobExecutions.add(job1Id);

    JobInstance jobInstance = new JobInstance(job1Id, "job1");

    expect(jobOperator.getJobNames()).andReturn(jobNames).anyTimes();
    expect(jobOperator.getJobInstances(eq("job1"), eq(0), eq(1))).andReturn(jobExecutions);
    expect(jobExplorer.getJobInstance(eq(job1Id))).andReturn(jobInstance);
    // expect(jobOperator.getJobInstances(eq("job2"), eq(0), eq(1))).andReturn(null);
    replayAll();
    assertThat(service.getJobs(), nullValue());
}
Project: spring-cloud-dataflow    File: JobCommandTests.java
private static void createSampleJob(String jobName, int jobExecutionCount) {
    JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
    jobInstances.add(instance);
    TaskExecution taskExecution = dao.createTaskExecution(jobName, new Date(), new ArrayList<String>(), null);
    Map<String, JobParameter> jobParameterMap = new HashMap<>();
    jobParameterMap.put("foo", new JobParameter("FOO", true));
    jobParameterMap.put("bar", new JobParameter("BAR", false));
    JobParameters jobParameters = new JobParameters(jobParameterMap);
    JobExecution jobExecution = null;
    for (int i = 0; i < jobExecutionCount; i++) {
        jobExecution = jobRepository.createJobExecution(instance, jobParameters, null);
        taskBatchDao.saveRelationship(taskExecution, jobExecution);
        StepExecution stepExecution = new StepExecution("foobar", jobExecution);
        jobRepository.add(stepExecution);
    }
}
Project: spring-cloud-dataflow    File: DataflowTemplateTests.java
private void assertCorrectMixins(RestTemplate restTemplate) {
    boolean containsMappingJackson2HttpMessageConverter = false;

    for (HttpMessageConverter<?> converter : restTemplate.getMessageConverters()) {
        if (converter instanceof MappingJackson2HttpMessageConverter) {
            containsMappingJackson2HttpMessageConverter = true;

            final MappingJackson2HttpMessageConverter jacksonConverter = (MappingJackson2HttpMessageConverter) converter;
            final ObjectMapper objectMapper = jacksonConverter.getObjectMapper();

            assertNotNull(objectMapper.findMixInClassFor(JobExecution.class));
            assertNotNull(objectMapper.findMixInClassFor(JobParameters.class));
            assertNotNull(objectMapper.findMixInClassFor(JobParameter.class));
            assertNotNull(objectMapper.findMixInClassFor(JobInstance.class));
            assertNotNull(objectMapper.findMixInClassFor(ExitStatus.class));
            assertNotNull(objectMapper.findMixInClassFor(StepExecution.class));
            assertNotNull(objectMapper.findMixInClassFor(ExecutionContext.class));
            assertNotNull(objectMapper.findMixInClassFor(StepExecutionHistory.class));
        }
    }

    if (!containsMappingJackson2HttpMessageConverter) {
        fail("Expected that the restTemplate's list of Message Converters contained a "
                + "MappingJackson2HttpMessageConverter");
    }
}
Project: eMonocot    File: JobExecutionDaoImpl.java
/**
 * @param resultSet Set the result set
 * @param rowNumber Set the row number
 * @throws SQLException if there is a problem
 * @return a job execution instance
 */
public final JobExecution mapRow(final ResultSet resultSet,
        final int rowNumber) throws SQLException {
    JobInstance jobInstance = new JobInstance(resultSet.getBigDecimal(
            "JOB_INSTANCE_ID").longValue(),
            new JobParameters(), resultSet.getString("JOB_NAME"));
    JobExecution jobExecution = new JobExecution(jobInstance,
            resultSet.getBigDecimal("JOB_EXECUTION_ID").longValue());
    jobExecution.setStartTime(resultSet.getTimestamp("START_TIME"));
    jobExecution.setCreateTime(resultSet.getTimestamp("CREATE_TIME"));
    jobExecution.setEndTime(resultSet.getTimestamp("END_TIME"));
    jobExecution.setStatus(BatchStatus.valueOf(resultSet
            .getString("STATUS")));
    ExitStatus exitStatus = new ExitStatus(
            resultSet.getString("EXIT_CODE"),
            resultSet.getString("EXIT_MESSAGE"));
    jobExecution.setExitStatus(exitStatus);
    return jobExecution;
}
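
For context, a brief usage sketch for a RowMapper like the one above; the SQL join and the surrounding class are illustrative assumptions, not code from the eMonocot project:

import java.util.List;

import org.springframework.batch.core.JobExecution;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;

public class JobExecutionQuerySketch {

    private final JdbcTemplate jdbcTemplate;
    private final RowMapper<JobExecution> rowMapper;

    public JobExecutionQuerySketch(JdbcTemplate jdbcTemplate, RowMapper<JobExecution> rowMapper) {
        this.jdbcTemplate = jdbcTemplate;
        this.rowMapper = rowMapper;
    }

    public List<JobExecution> findByJobName(String jobName) {
        // Illustrative query joining the standard Spring Batch metadata tables so the
        // mapper can read instance columns (JOB_INSTANCE_ID, JOB_NAME) alongside the
        // execution columns it expects.
        String sql = "SELECT i.JOB_INSTANCE_ID, i.JOB_NAME, e.JOB_EXECUTION_ID, e.START_TIME,"
                + " e.CREATE_TIME, e.END_TIME, e.STATUS, e.EXIT_CODE, e.EXIT_MESSAGE"
                + " FROM BATCH_JOB_EXECUTION e JOIN BATCH_JOB_INSTANCE i"
                + " ON e.JOB_INSTANCE_ID = i.JOB_INSTANCE_ID WHERE i.JOB_NAME = ?";
        return jdbcTemplate.query(sql, rowMapper, jobName);
    }
}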
Project: eMonocot    File: JobInstanceDaoImpl.java
@Override
public List<JobInstance> list(Integer page, Integer size) {
    HttpEntity<JobInstance> requestEntity = new HttpEntity<JobInstance>(httpHeaders);
    Map<String, Object> uriVariables = new HashMap<String, Object>();
    uriVariables.put("resource", resourceDir);
    if (size == null) {
        uriVariables.put("limit", "");
    } else {
        uriVariables.put("limit", size);
    }

    if (page == null) {
        uriVariables.put("start", "");
    } else {
        uriVariables.put("start", page);
    }

    ParameterizedTypeReference<List<JobInstance>> typeRef = new ParameterizedTypeReference<List<JobInstance>>() {};
    HttpEntity<List<JobInstance>> responseEntity = restTemplate.exchange(baseUri + "/{resource}?limit={limit}&start={start}", HttpMethod.GET,
            requestEntity, typeRef, uriVariables);
    return responseEntity.getBody();
}
Project: eMonocot    File: JobInstanceController.java
/**
 * @param jobInstance
 *            the job instance to save
 * @return A response entity containing a newly created job instance
 */
@RequestMapping(value = "/jobInstance",
        method = RequestMethod.POST)
public final ResponseEntity<JobInstance> create(@RequestBody final JobInstance jobInstance) {
    HttpHeaders httpHeaders = new HttpHeaders();
    try {
        httpHeaders.setLocation(new URI(baseUrl + "/jobInstance/"
                + jobInstance.getId()));
    } catch (URISyntaxException e) {
        logger.error(e.getMessage());
    }
    service.save(jobInstance);
    ResponseEntity<JobInstance> response = new ResponseEntity<JobInstance>(
            jobInstance, httpHeaders, HttpStatus.CREATED);
    return response;
}
Project: eMonocot    File: TestDataManager.java
/**
 * @param jobId
 *            Set the job id
 * @param jobName
 *            Set the job name
 * @param authorityName
 *            Set the authority name
 * @param version
 *            Set the version
 */
public void createJobInstance(String jobId,
        String jobName, String authorityName,
        String version) {
    enableAuthentication();
    Long id = null;
    if (jobId != null && jobId.length() > 0) {
        id = Long.parseLong(jobId);
    }
    Integer v = null;
    if (version != null && version.length() > 0) {
        v = Integer.parseInt(version);
    }
    Map<String, JobParameter> jobParameterMap = new HashMap<String, JobParameter>();

    if (authorityName != null && authorityName.length() > 0) {
        jobParameterMap.put("authority.name", new JobParameter(
                authorityName));
    }
    JobParameters jobParameters = new JobParameters(jobParameterMap);
    JobInstance jobInstance = new JobInstance(id, jobParameters, jobName);
    jobInstance.setVersion(v);
    data.push(jobInstance);
    jobInstanceService.save(jobInstance);
    disableAuthentication();
}
Project: eMonocot    File: CustomModule.java
@Override
public final void setupModule(final SetupContext setupContext) {
    SimpleKeyDeserializers keyDeserializers = new SimpleKeyDeserializers();
    keyDeserializers.addDeserializer(Location.class,
            new GeographicalRegionKeyDeserializer());
    setupContext.addKeyDeserializers(keyDeserializers);
    SimpleSerializers simpleSerializers = new SimpleSerializers();
    simpleSerializers.addSerializer(new JobInstanceSerializer());
    simpleSerializers.addSerializer(new JobExecutionSerializer());
    setupContext.addSerializers(simpleSerializers);

    SimpleDeserializers simpleDeserializers = new SimpleDeserializers();
    simpleDeserializers.addDeserializer(JobInstance.class,
            new JobInstanceDeserializer());
    simpleDeserializers.addDeserializer(JobExecution.class,
            new JobExecutionDeserializer(jobInstanceService));
    simpleDeserializers.addDeserializer(JobExecutionException.class,
            new JobExecutionExceptionDeserializer());
    setupContext.addDeserializers(simpleDeserializers);
}
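
As a usage note, a minimal sketch of how a Jackson Module such as the CustomModule above is registered on an ObjectMapper; how the module itself is constructed (for example its jobInstanceService dependency) is project-specific and left out here:

import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ModuleRegistrationSketch {
    public static ObjectMapper objectMapperWith(Module customModule) {
        // Jackson calls setupModule(...) when the module is registered, which is where
        // the serializers, deserializers and key deserializers above are attached.
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.registerModule(customModule);
        return objectMapper;
    }
}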
Project: eMonocot    File: JsonConversionTest.java
/**
 *
 * @throws Exception
 *             if there is a problem serializing the object
 */
@Test
public void testWriteJobInstance() throws Exception {
    Map<String, JobParameter> jobParameterMap = new HashMap<String, JobParameter>();
    jobParameterMap.put("authority.name", new JobParameter("test"));
    JobInstance jobInstance = new JobInstance(1L, new JobParameters(
            jobParameterMap), "testJob");
    jobInstance.setVersion(1);

    try {
        objectMapper.writeValueAsString(jobInstance);
    } catch (Exception e) {
        fail("No exception expected here");
    }

}
Project: marklogic-spring-batch    File: StepExecutionAdapter.java
@Override
public StepExecution unmarshal(AdaptedStepExecution v) throws Exception {
    JobExecution je = new JobExecution(v.getJobExecutionId());
    JobInstance ji = new JobInstance(v.getJobInstanceId(), v.getJobName());
    je.setJobInstance(ji);
    StepExecution step = new StepExecution(v.getStepName(), je);
    step.setId(v.getId());
    step.setStartTime(v.getStartTime());
    step.setEndTime(v.getEndTime());
    step.setReadSkipCount(v.getReadSkipCount());
    step.setWriteSkipCount(v.getWriteSkipCount());
    step.setProcessSkipCount(v.getProcessSkipCount());
    step.setReadCount(v.getReadCount());
    step.setWriteCount(v.getWriteCount());
    step.setFilterCount(v.getFilterCount());
    step.setRollbackCount(v.getRollbackCount());
    step.setExitStatus(new ExitStatus(v.getExitCode()));
    step.setLastUpdated(v.getLastUpdated());
    step.setVersion(v.getVersion());
    step.setStatus(v.getStatus());
    step.setExecutionContext(v.getExecutionContext());
    return step;
}
Project: marklogic-spring-batch    File: MarshallSpringBatchPojoToXmlTest.java
@Test
public void marshallStepExecutionTest() throws Exception {
    JobInstance jobInstance = new JobInstance(1234L, "test");
    JobExecution jobExecution = new JobExecution(123L);
    jobExecution.setJobInstance(jobInstance);
    StepExecution step = new StepExecution("testStep", jobExecution);
    step.setLastUpdated(new Date(System.currentTimeMillis()));
    StepExecutionAdapter adapter = new StepExecutionAdapter();
    AdaptedStepExecution adStep = adapter.marshal(step);
    jaxb2Marshaller.marshal(adStep, result);
    Fragment frag = new Fragment(new DOMBuilder().build(doc));
    frag.setNamespaces(getNamespaceProvider().getNamespaces());
    frag.prettyPrint();
    frag.assertElementExists("/msb:stepExecution");
    frag.assertElementExists("/msb:stepExecution/msb:lastUpdated");
    frag.assertElementValue("/msb:stepExecution/msb:stepName", "testStep");
}
Project: marklogic-spring-batch    File: MarkLogicJobInstanceDaoTests.java
/**
 * Create several instances of the same job and retrieve the most recent ones.
 */
@Transactional
@Test
public void testGetLastInstances() throws Exception {

    testCreateAndRetrieve();

    // unrelated job instance that should be ignored by the query
    jobInstanceDao.createJobInstance("anotherJob", new JobParameters());

    // we need two instances of the same job to check ordering
    jobInstanceDao.createJobInstance(fooJob, new JobParameters());

    List<JobInstance> jobInstances = jobInstanceDao.getJobInstances(fooJob, 0, 2);
    assertEquals(2, jobInstances.size());
    assertEquals(fooJob, jobInstances.get(0).getJobName());
    assertEquals(fooJob, jobInstances.get(1).getJobName());
    assertEquals(Integer.valueOf(0), jobInstances.get(0).getVersion());
    assertEquals(Integer.valueOf(0), jobInstances.get(1).getVersion());

    //assertTrue("Last instance should be first on the list", jobInstances.get(0).getCreateDateTime() > jobInstances.get(1)
    //  .getId());

}
Project: marklogic-spring-batch    File: MarkLogicJobInstanceDaoTests.java
/**
 * Requesting job instances beyond the end of the available list returns an empty result.
 */
@Transactional
@Test
public void testGetLastInstancesPastEnd() throws Exception {

    testCreateAndRetrieve();

    // unrelated job instance that should be ignored by the query
    jobInstanceDao.createJobInstance("anotherJob", new JobParameters());

    // we need two instances of the same job to check ordering
    jobInstanceDao.createJobInstance(fooJob, new JobParameters());

    List<JobInstance> jobInstances = jobInstanceDao.getJobInstances(fooJob, 4, 2);
    assertEquals(0, jobInstances.size());

}
Project: marklogic-spring-batch    File: MarkLogicJobExecutionDaoTests.java
@Before
public void onSetUp() throws Exception {
    jobParameters = new JobParameters();
    jobInstance = new JobInstance(12345L, "execJob");
    execution = new JobExecution(jobInstance, new JobParameters());
    execution.setStartTime(new Date(System.currentTimeMillis()));
    execution.setLastUpdated(new Date(System.currentTimeMillis()));
    execution.setEndTime(new Date(System.currentTimeMillis()));
    jobExecutionDao = new MarkLogicJobExecutionDao(getClient(), getBatchProperties());
}
Project: AGIA    File: JobLauncherDetails.java
@SuppressWarnings("unchecked")
protected void executeInternal(JobExecutionContext context) {
    Map<String, Object> jobDataMap = context.getMergedJobDataMap();
    String jobName = (String) jobDataMap.get(JOB_NAME);
    LOGGER.info("Quartz trigger firing with Spring Batch jobName=" + jobName);

    try {
        Job job = jobLocator.getJob(jobName);

        JobParameters previousJobParameters = null;
        List<JobInstance> jobInstances = jobExplorer.getJobInstances(jobName, 0, 1);
        if ((jobInstances != null) && (jobInstances.size() > 0)) {
            previousJobParameters = jobInstances.get(0).getJobParameters();
        }

        JobParameters jobParameters = getJobParametersFromJobMap(jobDataMap, previousJobParameters);

        if (job.getJobParametersIncrementer() != null) {
            jobParameters = job.getJobParametersIncrementer().getNext(jobParameters);
        }

        jobLauncher.run(jobLocator.getJob(jobName), jobParameters);
    } catch (JobExecutionException e) {
        LOGGER.error("Could not execute job.", e);
    }
}
Project: spring-batch-tools    File: BatchOperatorImplTest.java
@Test
public void startWithCustomStringParametersWithPreviousParameters() throws Exception {
    final JobInstance previousInstance = mock(JobInstance.class);
    when(jobExplorer.getJobInstances(JOB_NAME, 0, 1)).thenReturn(Arrays.asList(previousInstance));

    final JobParameters previousParams = new JobParameters();
    final JobExecution previousExecution = mock(JobExecution.class);
    when(previousExecution.getJobParameters()).thenReturn(previousParams);
    when(jobExplorer.getJobExecutions(previousInstance)).thenReturn(Arrays.asList(previousExecution));

    final JobParameters incremented = new JobParametersBuilder(params).addString("test", "test").toJobParameters();
    when(jobParametersIncrementer.getNext(previousParams)).thenReturn(incremented);

    final JobParameters expected = new JobParametersBuilder(incremented).addString("foo", "bar").addLong("answer", 42L, false)
            .toJobParameters();
    when(jobLauncher.run(job, expected)).thenReturn(execution);

    final JobParameters parameters = new JobParametersBuilder().addString("foo", "bar").addLong("answer", 42L, false).toJobParameters();
    final long executionId = batchOperator.start(JOB_NAME, parameters);
    assertThat("job execution id", executionId, is(1L));
}
Project: batchers    File: JobResultsServiceImpl.java
public List<JobResult> getJobResults() {
    List<JobInstance> jobInstancesByJobName = jobExplorer.findJobInstancesByJobName(AbstractEmployeeJobConfig.EMPLOYEE_JOB, 0, Integer.MAX_VALUE);

    DateTime currentTime = new DateTime();
    List<JobStartParams> months = getJobStartParamsPreviousMonths(currentTime.getYear(), currentTime.getMonthOfYear());

    final Map<JobStartParams, JobResult> jobResultMap = getMapOfJobResultsForJobInstances(jobInstancesByJobName);

    List<JobResult> collect = months
            .stream()
            .map(mapJobStartParamsToJobResult(jobResultMap))
            .sorted((comparing(onYear).thenComparing(comparing(onMonth))).reversed())
            .collect(Collectors.toList());

    return collect;
}
Project: batchers    File: JobResultsServiceImplTest.java
@Test
public void testGetFinishedJobResults_SameDates_SortingIsDescOnDate() throws Exception {
    //ARRANGE
    JobInstance jobInstance1 = new JobInstance(1L, EmployeeJobConfigSingleJvm.EMPLOYEE_JOB);

    when(jobExplorer.findJobInstancesByJobName(EmployeeJobConfigSingleJvm.EMPLOYEE_JOB, 0, MAX_VALUE))
            .thenReturn(asList(jobInstance1));

    DateTime dateTime = new DateTime();
    JobExecution jobInstance1_jobExecution1 = new JobExecution(jobInstance1, 1L, createJobParameters(dateTime.getYear(), dateTime.getMonthOfYear()), null);
    jobInstance1_jobExecution1.setEndTime(getDateOfDay(3));
    JobExecution jobInstance1_jobExecution2 = new JobExecution(jobInstance1, 2L, createJobParameters(dateTime.getYear(), dateTime.getMonthOfYear()), null);
    jobInstance1_jobExecution2.setEndTime(getDateOfDay(4));

    when(jobExplorer.getJobExecutions(jobInstance1)).thenReturn(asList(jobInstance1_jobExecution1, jobInstance1_jobExecution2));
    //ACT
    List<JobResult> jobResults = jobResultsService.getJobResults();

    assertThat(jobResults.get(0).getJobExecutionResults().get(0).getEndTime()).isAfter(jobResults.get(0).getJobExecutionResults().get(1).getEndTime());
}
Project: spring-boot-starter-batch-web    File: JobOperationsController.java
/**
 * Borrowed from CommandLineJobRunner.
 * @param job the job that we need to find the next parameters for
 * @return the next job parameters if they can be located
 * @throws JobParametersNotFoundException if there is a problem
 */
private JobParameters getNextJobParameters(Job job) throws JobParametersNotFoundException {
    String jobIdentifier = job.getName();
    JobParameters jobParameters;
    List<JobInstance> lastInstances = jobExplorer.getJobInstances(jobIdentifier, 0, 1);

    JobParametersIncrementer incrementer = job.getJobParametersIncrementer();

    if (lastInstances.isEmpty()) {
        jobParameters = incrementer.getNext(new JobParameters());
        if (jobParameters == null) {
            throw new JobParametersNotFoundException("No bootstrap parameters found from incrementer for job="
                    + jobIdentifier);
        }
    }
    else {
        List<JobExecution> lastExecutions = jobExplorer.getJobExecutions(lastInstances.get(0));
        jobParameters = incrementer.getNext(lastExecutions.get(0).getJobParameters());
    }
    return jobParameters;
}
Project: spring-boot-starter-batch-web    File: ProtocolListenerTest.java
@Test
public void createProtocol() throws Exception {
    // Given
    JobExecution jobExecution = new JobExecution(1L, new JobParametersBuilder().addString("test", "value").toJobParameters());
    jobExecution.setJobInstance(new JobInstance(1L, "test-job"));
    jobExecution.setCreateTime(new Date());
    jobExecution.setStartTime(new Date());
    jobExecution.setEndTime(new Date());
    jobExecution.setExitStatus(new ExitStatus("COMPLETED_WITH_ERRORS", "This is a default exit message"));
    jobExecution.getExecutionContext().put("jobCounter", 1);
    StepExecution stepExecution = jobExecution.createStepExecution("test-step-1");
    stepExecution.getExecutionContext().put("stepCounter", 1);
    ProtocolListener protocolListener = new ProtocolListener();
    // When
    protocolListener.afterJob(jobExecution);
    // Then
    String output = this.outputCapture.toString();
    assertThat(output, containsString("Protocol for test-job"));
    assertThat(output, containsString("COMPLETED_WITH_ERRORS"));
}
Project: egovframework.rte.root    File: EgovCommandLineRunner.java
/**
 * Creates the JobParameters for the next batch job to be run.
 * 
 * @param job
 * @return JobParameters
 * @throws JobParametersNotFoundException 
 */
private JobParameters getNextJobParameters(Job job) throws JobParametersNotFoundException {
    String jobIdentifier = job.getName();
    JobParameters jobParameters;
    List<JobInstance> lastInstances = jobExplorer.getJobInstances(jobIdentifier, 0, 1);

    JobParametersIncrementer incrementer = job.getJobParametersIncrementer();
    if (incrementer == null) {
        throw new JobParametersNotFoundException("No job parameters incrementer found for job=" + jobIdentifier);
    }

    if (lastInstances.isEmpty()) {
        jobParameters = incrementer.getNext(new JobParameters());
        if (jobParameters == null) {
            throw new JobParametersNotFoundException("No bootstrap parameters found from incrementer for job="
                    + jobIdentifier);
        }
    }
    else {
        jobParameters = incrementer.getNext(lastInstances.get(0).getJobParameters());
    }
    return jobParameters;
}
Project: composed-task-runner    File: ComposedRunnerVisitorTests.java
private Collection<StepExecution> getStepExecutions() {
    JobExplorer jobExplorer = this.applicationContext.getBean(JobExplorer.class);
    List<JobInstance> jobInstances = jobExplorer.findJobInstancesByJobName("job", 0, 1);
    assertEquals(1, jobInstances.size());
    JobInstance jobInstance = jobInstances.get(0);
    List<JobExecution> jobExecutions = jobExplorer.getJobExecutions(jobInstance);
    assertEquals(1, jobExecutions.size());
    JobExecution jobExecution = jobExecutions.get(0);
    return jobExecution.getStepExecutions();
}
Project: https-github.com-g0t4-jenkins2-course-spring-boot    File: JobLauncherCommandLineRunner.java
private JobParameters getNextJobParameters(Job job,
        JobParameters additionalParameters) {
    String name = job.getName();
    JobParameters parameters = new JobParameters();
    List<JobInstance> lastInstances = this.jobExplorer.getJobInstances(name, 0, 1);
    JobParametersIncrementer incrementer = job.getJobParametersIncrementer();
    Map<String, JobParameter> additionals = additionalParameters.getParameters();
    if (lastInstances.isEmpty()) {
        // Start from a completely clean sheet
        if (incrementer != null) {
            parameters = incrementer.getNext(new JobParameters());
        }
    }
    else {
        List<JobExecution> previousExecutions = this.jobExplorer
                .getJobExecutions(lastInstances.get(0));
        JobExecution previousExecution = previousExecutions.get(0);
        if (previousExecution == null) {
            // Normally this will not happen - an instance exists with no executions
            if (incrementer != null) {
                parameters = incrementer.getNext(new JobParameters());
            }
        }
        else if (isStoppedOrFailed(previousExecution) && job.isRestartable()) {
            // Retry a failed or stopped execution
            parameters = previousExecution.getJobParameters();
            // Non-identifying additional parameters can be removed to a retry
            removeNonIdentifying(additionals);
        }
        else if (incrementer != null) {
            // New instance so increment the parameters if we can
            parameters = incrementer.getNext(previousExecution.getJobParameters());
        }
    }
    return merge(parameters, additionals);
}
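
The isStoppedOrFailed(...) helper referenced above is not included in this listing; based on how it is used, a plausible sketch (an assumption, not the verbatim Spring Boot source) is:

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;

final class ExecutionStatusSketch {
    // A previous execution counts as resumable work when it ended in STOPPED or FAILED.
    static boolean isStoppedOrFailed(JobExecution execution) {
        BatchStatus status = execution.getStatus();
        return status == BatchStatus.STOPPED || status == BatchStatus.FAILED;
    }
}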
Project: spring-boot-concourse    File: JobLauncherCommandLineRunner.java
private JobParameters getNextJobParameters(Job job,
        JobParameters additionalParameters) {
    String name = job.getName();
    JobParameters parameters = new JobParameters();
    List<JobInstance> lastInstances = this.jobExplorer.getJobInstances(name, 0, 1);
    JobParametersIncrementer incrementer = job.getJobParametersIncrementer();
    Map<String, JobParameter> additionals = additionalParameters.getParameters();
    if (lastInstances.isEmpty()) {
        // Start from a completely clean sheet
        if (incrementer != null) {
            parameters = incrementer.getNext(new JobParameters());
        }
    }
    else {
        List<JobExecution> previousExecutions = this.jobExplorer
                .getJobExecutions(lastInstances.get(0));
        JobExecution previousExecution = previousExecutions.get(0);
        if (previousExecution == null) {
            // Normally this will not happen - an instance exists with no executions
            if (incrementer != null) {
                parameters = incrementer.getNext(new JobParameters());
            }
        }
        else if (isStoppedOrFailed(previousExecution) && job.isRestartable()) {
            // Retry a failed or stopped execution
            parameters = previousExecution.getJobParameters();
            // Non-identifying additional parameters can be removed to a retry
            removeNonIdentifying(additionals);
        }
        else if (incrementer != null) {
            // New instance so increment the parameters if we can
            parameters = incrementer.getNext(previousExecution.getJobParameters());
        }
    }
    return merge(parameters, additionals);
}
Project: spring-cloud-dataflow    File: JobExecutionResource.java
public JobExecutionResource(TaskJobExecution taskJobExecution, TimeZone timeZone) {
    Assert.notNull(taskJobExecution, "taskJobExecution must not be null");
    this.taskExecutionId = taskJobExecution.getTaskId();
    this.jobExecution = taskJobExecution.getJobExecution();
    this.timeZone = timeZone;
    this.executionId = jobExecution.getId();
    this.jobId = jobExecution.getJobId();
    this.stepExecutionCount = jobExecution.getStepExecutions().size();
    this.jobParameters = converter.getProperties(jobExecution.getJobParameters());
    this.jobParametersString = fromJobParameters(jobExecution.getJobParameters());
    this.defined = taskJobExecution.isTaskDefined();
    JobInstance jobInstance = jobExecution.getJobInstance();
    if (jobInstance != null) {
        this.name = jobInstance.getJobName();
        this.restartable = JobUtils.isJobExecutionRestartable(jobExecution);
        this.abandonable = JobUtils.isJobExecutionAbandonable(jobExecution);
        this.stoppable = JobUtils.isJobExecutionStoppable(jobExecution);
    }
    else {
        this.name = "?";
    }

    // Duration is always in GMT
    durationFormat.setTimeZone(TimeUtils.getDefaultTimeZone());
    // The others can be localized
    timeFormat.setTimeZone(timeZone);
    dateFormat.setTimeZone(timeZone);
    if (jobExecution.getStartTime() != null) {
        this.startDate = dateFormat.format(jobExecution.getStartTime());
        this.startTime = timeFormat.format(jobExecution.getStartTime());
        Date endTime = jobExecution.getEndTime() != null ? jobExecution.getEndTime() : new Date();
        this.duration = durationFormat.format(new Date(endTime.getTime() - jobExecution.getStartTime().getTime()));
    }

}
Project: spring-cloud-dataflow    File: JobInstanceExecutions.java
public JobInstanceExecutions(JobInstance jobInstance, List<TaskJobExecution> taskJobExecutions) {
    Assert.notNull(jobInstance, "jobInstance must not be null");
    this.jobInstance = jobInstance;
    if (taskJobExecutions == null) {
        this.taskJobExecutions = Collections.emptyList();
    }
    else {
        this.taskJobExecutions = Collections.unmodifiableList(taskJobExecutions);
    }
}
Project: spring-cloud-dataflow    File: DataFlowTemplate.java
/**
 * Will augment the provided {@link RestTemplate} with the Jackson Mixins required by
 * Spring Cloud Data Flow, specifically:
 * <p>
 * <ul>
 * <li>{@link JobExecutionJacksonMixIn}
 * <li>{@link JobParametersJacksonMixIn}
 * <li>{@link JobParameterJacksonMixIn}
 * <li>{@link JobInstanceJacksonMixIn}
 * <li>{@link ExitStatusJacksonMixIn}
 * <li>{@link StepExecutionJacksonMixIn}
 * <li>{@link ExecutionContextJacksonMixIn}
 * <li>{@link StepExecutionHistoryJacksonMixIn}
 * </ul>
 * <p>
 * Furthermore, this method will also register the {@link Jackson2HalModule}
 *
 * @param restTemplate Can be null. Instantiates a new {@link RestTemplate} if null
 * @return RestTemplate with the required Jackson Mixins
 */
public static RestTemplate prepareRestTemplate(RestTemplate restTemplate) {
    if (restTemplate == null) {
        restTemplate = new RestTemplate();
    }

    restTemplate.setErrorHandler(new VndErrorResponseErrorHandler(restTemplate.getMessageConverters()));

    boolean containsMappingJackson2HttpMessageConverter = false;

    for (HttpMessageConverter<?> converter : restTemplate.getMessageConverters()) {
        if (converter instanceof MappingJackson2HttpMessageConverter) {
            containsMappingJackson2HttpMessageConverter = true;
            final MappingJackson2HttpMessageConverter jacksonConverter = (MappingJackson2HttpMessageConverter) converter;
            jacksonConverter.getObjectMapper().registerModule(new Jackson2HalModule())
                    .addMixIn(JobExecution.class, JobExecutionJacksonMixIn.class)
                    .addMixIn(JobParameters.class, JobParametersJacksonMixIn.class)
                    .addMixIn(JobParameter.class, JobParameterJacksonMixIn.class)
                    .addMixIn(JobInstance.class, JobInstanceJacksonMixIn.class)
                    .addMixIn(ExitStatus.class, ExitStatusJacksonMixIn.class)
                    .addMixIn(StepExecution.class, StepExecutionJacksonMixIn.class)
                    .addMixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class)
                    .addMixIn(StepExecutionHistory.class, StepExecutionHistoryJacksonMixIn.class);
        }
    }

    if (!containsMappingJackson2HttpMessageConverter) {
        throw new IllegalArgumentException(
                "The RestTemplate does not contain a required " + "MappingJackson2HttpMessageConverter.");
    }
    return restTemplate;
}
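
A short usage sketch for prepareRestTemplate; the DataFlowTemplate import path is assumed here, and passing null (as the Javadoc above permits) lets the method create the RestTemplate itself:

import org.springframework.cloud.dataflow.rest.client.DataFlowTemplate;
import org.springframework.web.client.RestTemplate;

public class RestTemplatePreparationSketch {
    public static void main(String[] args) {
        // prepareRestTemplate(null) instantiates a new RestTemplate, registers the
        // Jackson2HalModule and adds the Batch-related mixins on its
        // MappingJackson2HttpMessageConverter.
        RestTemplate restTemplate = DataFlowTemplate.prepareRestTemplate(null);
        System.out.println(restTemplate.getMessageConverters().size() + " message converters configured");
    }
}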
Project: spring-cloud-dataflow    File: JobExecutionDeserializationTests.java
@Test
public void testDeserializationOfMultipleJobExecutions() throws IOException {

    final ObjectMapper objectMapper = new ObjectMapper();

    final InputStream inputStream = JobExecutionDeserializationTests.class
            .getResourceAsStream("/JobExecutionJson.txt");

    final String json = new String(StreamUtils.copyToByteArray(inputStream));

    objectMapper.registerModule(new Jackson2HalModule());
    objectMapper.addMixIn(JobExecution.class, JobExecutionJacksonMixIn.class);
    objectMapper.addMixIn(JobParameters.class, JobParametersJacksonMixIn.class);
    objectMapper.addMixIn(JobParameter.class, JobParameterJacksonMixIn.class);
    objectMapper.addMixIn(JobInstance.class, JobInstanceJacksonMixIn.class);
    objectMapper.addMixIn(StepExecution.class, StepExecutionJacksonMixIn.class);
    objectMapper.addMixIn(StepExecutionHistory.class, StepExecutionHistoryJacksonMixIn.class);
    objectMapper.addMixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class);
    objectMapper.addMixIn(ExitStatus.class, ExitStatusJacksonMixIn.class);

    PagedResources<Resource<JobExecutionResource>> paged = objectMapper.readValue(json,
            new TypeReference<PagedResources<Resource<JobExecutionResource>>>() {
            });
    JobExecutionResource jobExecutionResource = paged.getContent().iterator().next().getContent();
    Assert.assertEquals("Expect 1 JobExecutionInfoResource", 6, paged.getContent().size());
    Assert.assertEquals(Long.valueOf(6), jobExecutionResource.getJobId());
    Assert.assertEquals("job200616815", jobExecutionResource.getName());
    Assert.assertEquals("COMPLETED", jobExecutionResource.getJobExecution().getStatus().name());

}
Project: spring-cloud-dataflow    File: JobExecutionDeserializationTests.java
@Test
public void testDeserializationOfSingleJobExecution() throws IOException {

    final ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.registerModule(new Jackson2HalModule());

    final InputStream inputStream = JobExecutionDeserializationTests.class
            .getResourceAsStream("/SingleJobExecutionJson.txt");

    final String json = new String(StreamUtils.copyToByteArray(inputStream));

    objectMapper.addMixIn(JobExecution.class, JobExecutionJacksonMixIn.class);
    objectMapper.addMixIn(JobParameters.class, JobParametersJacksonMixIn.class);
    objectMapper.addMixIn(JobParameter.class, JobParameterJacksonMixIn.class);
    objectMapper.addMixIn(JobInstance.class, JobInstanceJacksonMixIn.class);
    objectMapper.addMixIn(StepExecution.class, StepExecutionJacksonMixIn.class);
    objectMapper.addMixIn(StepExecutionHistory.class, StepExecutionHistoryJacksonMixIn.class);
    objectMapper.addMixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class);
    objectMapper.addMixIn(ExitStatus.class, ExitStatusJacksonMixIn.class);
    objectMapper.setDateFormat(new ISO8601DateFormatWithMilliSeconds());

    final JobExecutionResource jobExecutionInfoResource = objectMapper.readValue(json, JobExecutionResource.class);

    Assert.assertNotNull(jobExecutionInfoResource);
    Assert.assertEquals(Long.valueOf(1), jobExecutionInfoResource.getJobId());
    Assert.assertEquals("ff.job", jobExecutionInfoResource.getName());
    Assert.assertEquals("COMPLETED", jobExecutionInfoResource.getJobExecution().getStatus().name());

}
Project: spring-cloud-dataflow    File: DefaultTaskJobService.java
/**
 * Retrieves Pageable list of {@link JobInstanceExecutions} from the JobRepository with a
 * specific jobName and matches the data with the associated JobExecutions.
 *
 * @param pageable enumerates the data to be returned.
 * @param jobName the name of the job for which to search.
 * @return List containing {@link JobInstanceExecutions}.
 */
@Override
public List<JobInstanceExecutions> listTaskJobInstancesForJobName(Pageable pageable, String jobName)
        throws NoSuchJobException {
    Assert.notNull(pageable, "pageable must not be null");
    Assert.notNull(jobName, "jobName must not be null");
    List<JobInstanceExecutions> taskJobInstances = new ArrayList<>();
    for (JobInstance jobInstance : jobService.listJobInstances(jobName, pageable.getOffset(),
            pageable.getPageSize())) {
        taskJobInstances.add(getJobInstanceExecution(jobInstance));
    }
    return taskJobInstances;
}
Project: spring-cloud-dataflow    File: TaskExecutionControllerTests.java
@Before
public void setupMockMVC() {
    this.mockMvc = MockMvcBuilders.webAppContextSetup(wac)
            .defaultRequest(get("/").accept(MediaType.APPLICATION_JSON)).build();
    if (!initialized) {
        this.sampleArgumentList = new LinkedList<String>();
        this.sampleArgumentList.add("--password=foo");
        this.sampleArgumentList.add("password=bar");
        this.sampleArgumentList.add("org.woot.password=baz");
        this.sampleArgumentList.add("foo.bar=foo");
        this.sampleArgumentList.add("bar.baz = boo");
        this.sampleArgumentList.add("foo.credentials.boo=bar");
        this.sampleArgumentList.add("spring.datasource.username=dbuser");
        this.sampleArgumentList.add("spring.datasource.password=dbpass");

        this.sampleCleansedArgumentList = new LinkedList<String>();
        this.sampleCleansedArgumentList.add("--password=******");
        this.sampleCleansedArgumentList.add("password=******");
        this.sampleCleansedArgumentList.add("org.woot.password=******");
        this.sampleCleansedArgumentList.add("foo.bar=foo");
        this.sampleCleansedArgumentList.add("bar.baz = boo");
        this.sampleCleansedArgumentList.add("foo.credentials.boo=******");
        this.sampleCleansedArgumentList.add("spring.datasource.username=dbuser");
        this.sampleCleansedArgumentList.add("spring.datasource.password=******");

        taskDefinitionRepository.save(new TaskDefinition(TASK_NAME_ORIG, "demo"));
        dao.createTaskExecution(TASK_NAME_ORIG, new Date(), this.sampleArgumentList, "foobar");
        dao.createTaskExecution(TASK_NAME_ORIG, new Date(), this.sampleArgumentList, null);
        dao.createTaskExecution(TASK_NAME_FOO, new Date(), this.sampleArgumentList, null);
        TaskExecution taskExecution = dao.createTaskExecution(TASK_NAME_FOOBAR, new Date(), this.sampleArgumentList,
                null);
        JobInstance instance = jobRepository.createJobInstance(TASK_NAME_FOOBAR, new JobParameters());
        JobExecution jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
        taskBatchDao.saveRelationship(taskExecution, jobExecution);
        initialized = true;
    }
}
Project: spring-cloud-dataflow    File: JobExecutionControllerTests.java
private void createSampleJob(String jobName, int jobExecutionCount, BatchStatus status) {
    JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
    TaskExecution taskExecution = dao.createTaskExecution(jobName, new Date(), new ArrayList<String>(), null);
    JobExecution jobExecution = null;

    for (int i = 0; i < jobExecutionCount; i++) {
        jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
        taskBatchDao.saveRelationship(taskExecution, jobExecution);
        jobExecution.setStatus(status);
        if (BatchStatus.STOPPED.equals(status)) {
            jobExecution.setEndTime(new Date());
        }
        jobRepository.update(jobExecution);
    }
}
Project: spring-cloud-dataflow    File: JobStepExecutionControllerTests.java
private void createStepExecution(String jobName, String... stepNames) {
    JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
    JobExecution jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
    for (String stepName : stepNames) {
        StepExecution stepExecution = new StepExecution(stepName, jobExecution, 1L);
        stepExecution.setId(null);
        jobRepository.add(stepExecution);
    }
    TaskExecution taskExecution = dao.createTaskExecution(jobName, new Date(), new ArrayList<String>(), null);
    taskBatchDao.saveRelationship(taskExecution, jobExecution);
}
Project: spring-cloud-dataflow    File: JobInstanceControllerTests.java
private void createSampleJob(String jobName, int jobExecutionCount) {
    JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
    TaskExecution taskExecution = dao.createTaskExecution(jobName, new Date(), new ArrayList<String>(), null);
    JobExecution jobExecution = null;

    for (int i = 0; i < jobExecutionCount; i++) {
        jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
        taskBatchDao.saveRelationship(taskExecution, jobExecution);
    }
}
Project: contestparser    File: JobLauncherCommandLineRunner.java
private JobParameters getNextJobParameters(Job job,
        JobParameters additionalParameters) {
    String name = job.getName();
    JobParameters parameters = new JobParameters();
    List<JobInstance> lastInstances = this.jobExplorer.getJobInstances(name, 0, 1);
    JobParametersIncrementer incrementer = job.getJobParametersIncrementer();
    Map<String, JobParameter> additionals = additionalParameters.getParameters();
    if (lastInstances.isEmpty()) {
        // Start from a completely clean sheet
        if (incrementer != null) {
            parameters = incrementer.getNext(new JobParameters());
        }
    }
    else {
        List<JobExecution> previousExecutions = this.jobExplorer
                .getJobExecutions(lastInstances.get(0));
        JobExecution previousExecution = previousExecutions.get(0);
        if (previousExecution == null) {
            // Normally this will not happen - an instance exists with no executions
            if (incrementer != null) {
                parameters = incrementer.getNext(new JobParameters());
            }
        }
        else if (isStoppedOrFailed(previousExecution) && job.isRestartable()) {
            // Retry a failed or stopped execution
            parameters = previousExecution.getJobParameters();
            // Non-identifying additional parameters can be removed to a retry
            removeNonIdentifying(additionals);
        }
        else if (incrementer != null) {
            // New instance so increment the parameters if we can
            parameters = incrementer.getNext(previousExecution.getJobParameters());
        }
    }
    return merge(parameters, additionals);
}
Project: eMonocot    File: JobInstanceDaoImpl.java
/**
 *
 * @param identifier
 *            the identifier of the job
 * @return a job instance
 */
public final JobInstance load(final Long identifier) {
    JobParameters jobParameters = getJobParameters(identifier);
    RowMapper<JobInstance> rowMapper = new JobInstanceRowMapper(jobParameters);
    JobInstance jobInstance = getJdbcTemplate()
            .queryForObject(
                    "SELECT JOB_INSTANCE_ID, JOB_NAME, VERSION from BATCH_JOB_INSTANCE where JOB_INSTANCE_ID = ?",
                    rowMapper, identifier);
    return jobInstance;
}
Project: eMonocot    File: JobInstanceDaoImpl.java
@Override
public List<JobInstance> list(Integer page, Integer size) {
    RowMapper<JobInstance> rowMapper = new JobInstanceRowMapper();
    if (size == null && page == null) {
        return getJdbcTemplate().query("SELECT JOB_INSTANCE_ID, JOB_NAME, VERSION from BATCH_JOB_INSTANCE", rowMapper);
    } else if (page == null) {
        return getJdbcTemplate().query("SELECT JOB_INSTANCE_ID, JOB_NAME, VERSION from BATCH_JOB_INSTANCE LIMIT ?", rowMapper,size);
    } else {
        return getJdbcTemplate().query("SELECT JOB_INSTANCE_ID, JOB_NAME, VERSION from BATCH_JOB_INSTANCE LIMIT ? OFFSET ?", rowMapper,size, page * size);
    }
}
Project: eMonocot    File: JobInstanceDaoImpl.java
/**
 *
 * @param jobInstance
 *            The jobInstance to save
 */
public final void save(final JobInstance jobInstance) {
    String jobKey = createJobKey(jobInstance.getJobParameters());
    getJdbcTemplate().update(
            "INSERT into BATCH_JOB_INSTANCE(JOB_INSTANCE_ID, JOB_NAME, VERSION, JOB_KEY)"
                    + " values (?, ?, ?, ?)", jobInstance.getId(),
                    jobInstance.getJobName(), jobInstance.getVersion(), jobKey);
    for (String key : jobInstance.getJobParameters().getParameters()
            .keySet()) {
        JobParameter jobParameter = jobInstance.getJobParameters()
                .getParameters().get(key);
        insertParameter(jobInstance.getId(), jobParameter.getType(), key,
                jobParameter.getValue());
    }
}
Project: eMonocot    File: JobInstanceDaoImpl.java
/**
 * @param resultSet
 *            Set the result set
 * @param rowNumber
 *            Set the row number
 * @throws SQLException
 *             if there is a problem
 * @return a job instance
 */
public final JobInstance mapRow(final ResultSet resultSet,
        final int rowNumber) throws SQLException {
    JobInstance jobInstance = new JobInstance(resultSet.getBigDecimal(
            "JOB_INSTANCE_ID").longValue(), jobParameters,
            resultSet.getString("JOB_NAME"));
    BigDecimal version = resultSet.getBigDecimal("VERSION");
    if (version != null) {
        jobInstance.setVersion(version.intValue());
    }
    return jobInstance;
}
Project: eMonocot    File: DataManagementSupport.java
/**
 *
 * @param jobInstance
 *            Set the job instance
 * @return a job execution
 */
public JobExecution createJobExecution(
        JobInstance jobInstance) {
    JobExecution jobExecution = new JobExecution(jobInstance);
    setUp.add(jobExecution);
    tearDown.push(jobExecution);
    return jobExecution;
}