/** * Taskletのエントリポイント * * @param contribution ステップの実行状態 * @param chunkContext チャンクの実行状態 * @return ステータス(終了) * @throws Exception 予期しない例外 */ @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { Map<String, Object> jobParameters = chunkContext.getStepContext().getJobParameters(); // ジョブ起動パラメータの取得 LOGGER.info("FirstTasklet has been executed. job param is {}", jobParameters); ExecutionContext executionContext = chunkContext.getStepContext() .getStepExecution() .getJobExecution() .getExecutionContext(); // ステップ間の情報引き継ぎはJobExecutionのExecutionContextを取得する。(StepExecutionのExecutionContextではダメ) executionContext.put("message", "foobar"); return RepeatStatus.FINISHED; // このステップはこれで終了 }
/**
 * Fetches at most one identifier from the database, stores it under
 * "record.identifier" in the job-level ExecutionContext, and signals
 * "MORE_RESULTS" / "NO_MORE_RESULTS" via the step's exit status.
 */
@Override
@Transactional
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    Query<String> singleRowQuery = currentSession().createQuery(queryString, String.class);
    singleRowQuery.setFirstResult(0);
    singleRowQuery.setMaxResults(1);
    try {
        String identifier = singleRowQuery.getSingleResult();
        chunkContext.getStepContext()
                .getStepExecution()
                .getJobExecution()
                .getExecutionContext()
                .put("record.identifier", identifier);
        contribution.setExitStatus(new ExitStatus("MORE_RESULTS"));
    } catch (NoResultException e) {
        // No rows left: tell the flow there is nothing more to process.
        contribution.setExitStatus(new ExitStatus("NO_MORE_RESULTS"));
    }
    return RepeatStatus.FINISHED;
}
/**
 * Parses the configured default values ("[a=1,b=2]") and field names
 * ("[a,b,c]") and publishes both to the job-level ExecutionContext for use
 * by later steps.
 */
public final RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
    final Map<String, String> defaults = new HashMap<>();
    if (this.defaultValues != null) {
        // Strip the surrounding brackets, then split "field=value" pairs on commas.
        final String body = defaultValues.substring(1, this.defaultValues.length() - 1);
        for (final String pair : body.split(",")) {
            final int eq = pair.indexOf("=");
            if (eq > -1) {
                defaults.put(pair.substring(0, eq), pair.substring(eq + 1));
            }
        }
    }
    chunkContext.getStepContext().getStepExecution()
            .getJobExecution().getExecutionContext().put(defaultValuesKey, defaults);
    logger.debug("SETTING " + defaultValuesKey + " as " + defaults);

    // Field names are stored the same bracketed way; publish them as an array.
    final String trimmedNames = fieldNames.substring(1, this.fieldNames.length() - 1);
    final String[] nameArray = trimmedNames.split(",");
    chunkContext.getStepContext().getStepExecution()
            .getJobExecution().getExecutionContext().put(fieldNamesKey, nameArray);
    logger.debug("SETTING " + fieldNamesKey + " as " + Arrays.toString(nameArray));
    return RepeatStatus.FINISHED;
}
/**
 * Executes the task as specified by the taskName with the associated
 * properties and arguments.
 *
 * @param contribution mutable state to be passed back to update the current step execution
 * @param chunkContext contains the task-execution-id used by the listener.
 * @return Repeat status of FINISHED.
 */
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    // Strip the generated suffix after the final '_' to recover the base task name.
    // NOTE(review): assumes taskName always contains an underscore — confirm upstream.
    String baseTaskName = this.taskName.substring(0, this.taskName.lastIndexOf('_'));

    ExecutionContext stepExecutionContext = chunkContext.getStepContext().getStepExecution()
            .getExecutionContext();
    // On a restart, prefer the arguments recorded by the previous attempt.
    List<String> launchArgs = this.arguments;
    if (stepExecutionContext.containsKey("task-arguments")) {
        launchArgs = (List<String>) stepExecutionContext.get("task-arguments");
    }

    long executionId = this.taskOperations.launch(baseTaskName, this.properties, launchArgs);
    stepExecutionContext.put("task-execution-id", executionId);
    stepExecutionContext.put("task-arguments", launchArgs);

    waitForTaskToComplete(executionId);
    return RepeatStatus.FINISHED;
}
@Bean public Step step1() { return stepBuilderFactory.get("step1") .tasklet(new Tasklet() { @Override public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception { // get path of file in src/main/resources Path xmlDocPath = Paths.get(getFilePath()); // process the file to json String json = processXML2JSON(xmlDocPath); // insert json into mongodb insertToMongo(json); return RepeatStatus.FINISHED; } }).build(); }
/**
 * Invokes the stored procedure named by {@code scriptFile}, passing the
 * space-separated tokens of the "JobParameters" job parameter as quoted
 * arguments, e.g. {@code call proc('a','b')}.
 *
 * On failure the error is logged with its stack trace and the step's exit
 * status is set to FAILED; the tasklet itself still reports FINISHED.
 */
@Override
public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) {
    Object parametersObj = chunkContext.getStepContext().getJobParameters().get("JobParameters");
    // Build the "(...)" argument list with a StringBuilder instead of
    // repeated string concatenation inside the loop.
    StringBuilder params = new StringBuilder("(");
    if (parametersObj != null) {
        String[] tokens = parametersObj.toString().split(" ");
        for (int i = 0; i < tokens.length; i++) {
            if (i > 0) {
                params.append(',');
            }
            params.append('\'').append(tokens[i]).append('\'');
        }
    }
    params.append(')');
    try {
        logger.info("program is :" + scriptFile + ", argument is:" + params);
        // NOTE(review): scriptFile and the arguments are concatenated into the
        // SQL text — ensure they come from trusted configuration, not user input.
        jdbcTemplate.execute("call " + scriptFile + params);
    } catch (Exception e) {
        // Fix: log the full exception so the stack trace is not lost
        // (previously only e.getMessage() was recorded).
        logger.error(e.getMessage(), e);
        chunkContext.getStepContext().getStepExecution().setExitStatus(ExitStatus.FAILED);
    }
    return RepeatStatus.FINISHED;
}
@Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { logger.info("Started MappingParserTask"); // Delete the files that are error out in previous run Arrays.stream(new File(propertyConstants.getReportDir()).listFiles()).forEach(File::delete); logger.debug("Found Mapping file:: " + propertyConstants.getMappingJsonPath()); // Create the mapping between Hub and Fortify final List<BlackDuckFortifyMapperGroup> groupMap = mappingParser.createMapping(propertyConstants.getMappingJsonPath()); logger.info("blackDuckFortifyMappers :" + groupMap.toString()); // Create the threads for parallel processing ExecutorService exec = Executors.newFixedThreadPool(propertyConstants.getMaximumThreadSize()); List<Future<?>> futures = new ArrayList<>(groupMap.size()); for (BlackDuckFortifyMapperGroup blackDuckFortifyMapperGroup : groupMap) { futures.add(exec.submit(new BlackDuckFortifyPushThread(blackDuckFortifyMapperGroup, hubServices, fortifyFileTokenApi, fortifyUploadApi, propertyConstants))); } for (Future<?> f : futures) { f.get(); // wait for a processor to complete } jobStatus = true; logger.info("After all threads processing"); return RepeatStatus.FINISHED; }
/**
 * Downloads the club CSV into a temp file and publishes its path to the
 * job-level ExecutionContext; the temp file is removed if the download fails.
 */
@Override
public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
    final Stopwatch stopwatch = Stopwatch.createStarted();
    final Path tempFile = Files.createTempFile(FILE_PREFIX, FILE_SUFFIX);
    try {
        final Path downloaded = lupahallintaHttpClient.downloadClubCSV(tempFile);
        LOG.info("Download completed successfully in {}, inputFile={}", stopwatch, downloaded);
        chunkContext.getStepContext()
                .getStepExecution()
                .getJobExecution()
                .getExecutionContext()
                .put(KEY_INPUT_FILE, downloaded.toString());
    } catch (final Exception ex) {
        // Clean up the partially written temp file before propagating.
        Files.deleteIfExists(tempFile);
        throw ex;
    }
    return RepeatStatus.FINISHED;
}
/**
 * Archives the imported hunter-registry input file into permanent file
 * storage under a fresh UUID, then deletes the on-disk original.
 */
@Override
public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
    final MetsastajaRekisteriJobParameters jobParameters = new MetsastajaRekisteriJobParameters(
            chunkContext.getStepContext().getJobParameters());

    final Path sourcePath = Paths.get(jobParameters.getInputFile());
    final File sourceFile = sourcePath.toFile();

    final UUID archiveId = UUID.randomUUID();
    LOG.info("Archiving file with UUID={} filename={}", archiveId, sourceFile.getName());

    fileStorageService.storeFile(
            archiveId, sourceFile, FileType.METSASTAJAREKISTERI,
            "application/octet-stream", sourceFile.getName());

    // The stored copy is canonical; remove the original from disk.
    Files.delete(sourcePath);
    return RepeatStatus.FINISHED;
}
@Before
public void init() throws Exception {
    // Bootstrap the batch infrastructure beans from the test context.
    this.context.register(BatchConfiguration.class);
    this.context.refresh();
    JobRepository jobRepository = this.context.getBean(JobRepository.class);
    this.jobLauncher = this.context.getBean(JobLauncher.class);
    this.jobs = new JobBuilderFactory(jobRepository);
    PlatformTransactionManager txManager = this.context
            .getBean(PlatformTransactionManager.class);
    this.steps = new StepBuilderFactory(jobRepository, txManager);

    // A no-op step/job pair; returning null from a tasklet ends the step.
    this.step = this.steps.get("step").tasklet((contribution, chunkContext) -> null).build();
    this.job = this.jobs.get("job").start(this.step).build();

    this.jobExplorer = this.context.getBean(JobExplorer.class);
    this.runner = new JobLauncherCommandLineRunner(this.jobLauncher, this.jobExplorer);
    this.context.getBean(BatchConfiguration.class).clear();
}
@Test public void retryFailedExecutionOnNonRestartableJob() throws Exception { this.job = this.jobs.get("job").preventRestart() .start(this.steps.get("step").tasklet(new Tasklet() { @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { throw new RuntimeException("Planned"); } }).build()).incrementer(new RunIdIncrementer()).build(); this.runner.execute(this.job, new JobParameters()); this.runner.execute(this.job, new JobParameters()); // A failed job that is not restartable does not re-use the job params of // the last execution, but creates a new job instance when running it again. assertThat(this.jobExplorer.getJobInstances("job", 0, 100)).hasSize(2); }
@Test
public void retryFailedExecutionWithNonIdentifyingParameters() throws Exception {
    // A failing step run twice with identical NON-identifying parameters.
    this.job = this.jobs.get("job")
            .start(this.steps.get("step").tasklet((contribution, chunkContext) -> {
                throw new RuntimeException("Planned");
            }).build()).incrementer(new RunIdIncrementer()).build();
    JobParameters jobParameters = new JobParametersBuilder().addLong("id", 1L, false)
            .addLong("foo", 2L, false).toJobParameters();
    this.runner.execute(this.job, jobParameters);
    this.runner.execute(this.job, jobParameters);
    // Non-identifying params do not create a second job instance.
    assertThat(this.jobExplorer.getJobInstances("job", 0, 100)).hasSize(1);
}
/**
 * Repeats a business execution until an external stop signal is observed.
 *
 * Each invocation: run one-time init on the first call, execute the business
 * logic and map its result onto the step's exit status, then sleep. Under the
 * shared lock, a set stop flag wakes any waiting threads and finishes the
 * tasklet; otherwise CONTINUABLE asks Spring Batch to invoke execute() again.
 */
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    if (init) {
        // One-time initialisation, guarded by the 'init' flag.
        init();
        log.info("the batch process inits completely.");
        init = false;
    }
    try {
        // Run one business iteration and record its mapped exit status.
        contribution.setExitStatus(mapResult(execute(chunkContext)));
        // Throttle between iterations.
        Thread.sleep(getSleepTime());
    } catch (Exception e) {
        // Best-effort: failures are logged and the loop continues.
        log.error(e.getMessage(), e);
    } finally {
        // The stop check runs even when the iteration failed, so a stop
        // request is never missed.
        synchronized (lock) {
            if (stop) {
                // Wake threads blocked on the lock, then end the step.
                lock.notifyAll();
                log.info("business is executed compeletly!");
                return RepeatStatus.FINISHED;
            }
        }
    }
    return RepeatStatus.CONTINUABLE;
}
/**
 * Demo tasklet that reports NOOP or COMPLETED based on the parity of the
 * current time in milliseconds, printing the chosen exit code.
 *
 * Fixes: uses System.currentTimeMillis() directly instead of constructing a
 * Calendar, and drops the redundant toString() on getExitCode(), which
 * already returns a String.
 */
@Override
public RepeatStatus execute(StepContribution arg0, ChunkContext arg1) throws Exception {
    long time = System.currentTimeMillis();
    String exit;
    if (time % 2 == 0) {
        exit = ExitStatus.NOOP.getExitCode();
        arg0.setExitStatus(ExitStatus.NOOP);
    } else {
        exit = ExitStatus.COMPLETED.getExitCode();
        arg0.setExitStatus(ExitStatus.COMPLETED);
    }
    System.out.println("Executing step with name " + taskletName + " and exitCode " + exit);
    return RepeatStatus.FINISHED;
}
@Test public void retryFailedExecutionOnNonRestartableJob() throws Exception { this.job = this.jobs.get("job").preventRestart() .start(this.steps.get("step").tasklet(new Tasklet() { @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { throw new RuntimeException("Planned"); } }).build()).incrementer(new RunIdIncrementer()).build(); this.runner.execute(this.job, new JobParameters()); this.runner.execute(this.job, new JobParameters()); // A failed job that is not restartable does not re-use the job params of // the last execution, but creates a new job instance when running it again. assertEquals(2, this.jobExplorer.getJobInstances("job", 0, 100).size()); }
@Test
public void retryFailedExecutionWithNonIdentifyingParameters() throws Exception {
    // A failing step run twice with identical NON-identifying parameters.
    this.job = this.jobs.get("job")
            .start(this.steps.get("step").tasklet((contribution, chunkContext) -> {
                throw new RuntimeException("Planned");
            }).build()).incrementer(new RunIdIncrementer()).build();
    JobParameters jobParameters = new JobParametersBuilder().addLong("id", 1L, false)
            .addLong("foo", 2L, false).toJobParameters();
    this.runner.execute(this.job, jobParameters);
    this.runner.execute(this.job, jobParameters);
    // Non-identifying params do not create a second job instance.
    assertEquals(1, this.jobExplorer.getJobInstances("job", 0, 100).size());
}
/**
 * Peeks at the first two characters of the web-service response to detect an
 * empty JSON list ("[]") or an unreadable response, recording the outcome in
 * the step's exit status.
 *
 * Fix: the reader is now opened in try-with-resources so it is closed even
 * when read() throws (previously the stream leaked on failure).
 */
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    // NOTE(review): InputStreamReader uses the platform default charset here,
    // as the original did — confirm the service's encoding if this matters.
    try (InputStreamReader inputStreamReader =
            new InputStreamReader(new BufferedInputStream(inputFile.getInputStream()))) {
        char[] buffer = new char[2];
        if (inputStreamReader.read(buffer) == 2) {
            if (new String(buffer).equals("[]")) {
                contribution.setExitStatus(new ExitStatus("EMPTY_RESPONSE")
                        .addExitDescription("The webservice returned an empty list of taxa"));
            }
        } else {
            // Fewer than two chars available: response could not be read.
            contribution.setExitStatus(ExitStatus.FAILED
                    .addExitDescription("Unable to read the webservice response"));
        }
    }
    return RepeatStatus.FINISHED;
}
/**
 * @param contribution Set the step contribution
 * @param chunkContext Set the chunk context
 * @return the repeat status
 * @throws Exception if there is a problem deleting the resources
 */
public final RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
    // Allocate a unique spool file name for this run.
    final UUID runId = UUID.randomUUID();
    final File temporaryFile = new File(harvesterSpoolDirectory + File.separator + runId.toString() + ".xml");

    // Later steps read these keys from the job-level ExecutionContext.
    ExecutionContext executionContext = chunkContext.getStepContext()
            .getStepExecution().getJobExecution().getExecutionContext();
    executionContext.put("startindex", 0);
    executionContext.put("temporary.file.name", temporaryFile.getAbsolutePath());
    executionContext.putLong("job.execution.id",
            chunkContext.getStepContext().getStepExecution().getJobExecutionId());
    return RepeatStatus.FINISHED;
}
/**
 * @param contribution Set the step contribution
 * @param chunkContext Set the chunk context
 * @return the repeat status
 * @throws Exception if there is a problem deleting the resources
 */
public final RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
    // One UUID names both the unpack directory and the downloaded archive.
    final UUID runId = UUID.randomUUID();
    final File unpackDirectory = new File(harvesterSpoolDirectory + File.separator + runId.toString());
    // NOTE(review): the mkdir() result is ignored — a failure only surfaces
    // later when the directory is used; confirm that is acceptable.
    unpackDirectory.mkdir();
    final File temporaryFile = new File(harvesterSpoolDirectory + File.separator + runId.toString() + ".zip");

    // Publish the paths and job execution id for downstream steps.
    ExecutionContext executionContext = chunkContext.getStepContext()
            .getStepExecution().getJobExecution().getExecutionContext();
    executionContext.put("temporary.file.name", temporaryFile.getAbsolutePath());
    executionContext.put("unpack.directory.name", unpackDirectory.getAbsolutePath());
    executionContext.putLong("job.execution.id",
            chunkContext.getStepContext().getStepExecution().getJobExecutionId());
    return RepeatStatus.FINISHED;
}
/**
 * Applies the configured XSLT stylesheet to the input document and writes the
 * transformed result to the output file, forwarding any configured parameters
 * and error listener to the transformer.
 *
 * @param contribution
 *          Set the step contribution
 * @param chunkContext
 *          Set the chunk context
 * @return the repeat status
 * @throws Exception
 *           if there is a problem
 */
public final RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
    // Set up input documents
    Source inputXML = new StreamSource(inputFile.getFile());
    Source inputXSL = new StreamSource(xsltFile.getFile());
    // Set up output sink
    Result outputXHTML = new StreamResult(outputFile.getFile());
    // Setup a factory for transforms.
    // NOTE(review): the factory is not hardened against DTDs/external entities
    // — if the stylesheet or input can come from untrusted sources, consider
    // enabling secure processing (XXE hardening).
    TransformerFactory factory = TransformerFactory.newInstance();
    Transformer transformer = factory.newTransformer(inputXSL);
    // Forward each configured stylesheet parameter by name.
    for (String parameterName : parameters.keySet()) {
        transformer.setParameter(parameterName, parameters.get(parameterName));
    }
    if(errorListener != null) {
        transformer.setErrorListener(errorListener);
    }
    transformer.transform(inputXML, outputXHTML);
    return RepeatStatus.FINISHED;
}
/**
 * Optimizes the Solr core when its segment count has reached the configured
 * threshold; otherwise the optimization is skipped.
 */
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    CoreAdminResponse status = CoreAdminRequest.getStatus(core, solrServer);
    NamedList<Object> index = (NamedList<Object>) status.getCoreStatus(core).get("index");
    Integer segmentCount = (Integer) index.get("segmentCount");
    // Inverted branch order relative to threshold; behavior unchanged.
    if (segmentCount >= maxSegments) {
        logger.debug("Core " + core + " has " + segmentCount + " segments, starting optimization");
        solrServer.optimize(true, true);
        logger.debug("Core " + core + " optimized");
    } else {
        logger.debug("Core " + core + " only has " + segmentCount + " segments, skipping optimization");
    }
    return RepeatStatus.FINISHED;
}
/**
 * Fetches at most one record identifier via the legacy Hibernate query API.
 * When present, the identifier is stored under "record.identifier" in the
 * job-level ExecutionContext and the exit status is "MORE_RESULTS";
 * otherwise it is "NO_MORE_RESULTS".
 *
 * Fixes: uses isEmpty() instead of size() == 0 and makes the unavoidable
 * unchecked cast explicit with @SuppressWarnings.
 */
@SuppressWarnings("unchecked")
@Override
@Transactional
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    Query query = this.getSession().createQuery(queryString);
    query.setMaxResults(1);
    query.setFirstResult(0);
    List<String> results = (List<String>) query.list();
    if (results.isEmpty()) {
        contribution.setExitStatus(new ExitStatus("NO_MORE_RESULTS"));
    } else {
        chunkContext.getStepContext().getStepExecution().getJobExecution()
                .getExecutionContext().put("record.identifier", results.get(0));
        contribution.setExitStatus(new ExitStatus("MORE_RESULTS"));
    }
    return RepeatStatus.FINISHED;
}
/**
 * Deletes all documents matching the query definition, choosing the API by
 * MarkLogic server version: one-by-one via search results on v8, or the Data
 * Movement SDK's batched DeleteListener on v9.
 */
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    switch (marklogicVersion) {
    case "8": {
        QueryManager queryManager = databaseClient.newQueryManager();
        GenericDocumentManager documentManager = databaseClient.newDocumentManager();
        SearchHandle handle = queryManager.search(queryDefinition, new SearchHandle());
        for (MatchDocumentSummary summary : handle.getMatchResults()) {
            documentManager.delete(summary.getUri());
        }
        break;
    }
    case "9": {
        final DataMovementManager dataMovementManager = databaseClient.newDataMovementManager();
        QueryBatcher batcher = dataMovementManager.newQueryBatcher(queryDefinition)
                .withBatchSize(2500)
                .withConsistentSnapshot()
                .onUrisReady(new DeleteListener())
                // NOTE(review): failures only reach stderr; consider a logger.
                .onQueryFailure(throwable -> throwable.printStackTrace());
        JobTicket ticket = dataMovementManager.startJob(batcher);
        batcher.awaitCompletion();
        dataMovementManager.stopJob(ticket);
        break;
    }
    default:
        // Other versions fall through untouched, as in the original if/else.
        break;
    }
    return RepeatStatus.FINISHED;
}
/**
 * @param contribution Set the step contribution
 * @param chunkContext Set the chunk context
 * @return the repeat status
 * @throws Exception if there is a problem deleting the resources
 */
public final RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
    // Derive a per-run working directory and archive file from one UUID.
    final String base = harvesterSpoolDirectory + File.separator + UUID.randomUUID().toString();
    final File unpackDirectory = new File(base);
    unpackDirectory.mkdir();
    final File temporaryFile = new File(base + ".zip");

    // Publish the paths and job execution id for downstream steps.
    final ExecutionContext executionContext = chunkContext.getStepContext()
            .getStepExecution().getJobExecution().getExecutionContext();
    executionContext.put("temporary.file.name", temporaryFile.getAbsolutePath());
    executionContext.put("unpack.directory.name", unpackDirectory.getAbsolutePath());
    executionContext.putLong("job.execution.id",
            chunkContext.getStepContext().getStepExecution().getJobExecutionId());

    log.debug("setting temporary.file.name to {}", temporaryFile.getAbsolutePath());
    log.debug("execution context: {}", executionContext);
    return RepeatStatus.FINISHED;
}
/**
 * Inserts a 'Warn'/'Absent' annotation row for every annotated object of the
 * configured type belonging to this authority and resource, tagged with the
 * current job execution id.
 */
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    // NOTE(review): annotatedObjType is concatenated into the SQL as the table
    // name (table names cannot be bound parameters) — it must come from
    // trusted configuration, never user input.
    String queryString = "INSERT INTO Annotation (annotatedObjId, annotatedObjType, jobId, dateTime, authority_id, resource_id, type, code, recordType) "
            + "SELECT o.id, :annotatedObjType, :jobId, now(), :authorityId, :resourceId, 'Warn', 'Absent', :annotatedObjType "
            + "FROM " + annotatedObjType + " o "
            + "WHERE o.authority_id = :authorityId AND o.resource_id = :resourceId";
    // Record the job execution id for downstream steps/listeners.
    stepExecution.getJobExecution().getExecutionContext().putLong("job.execution.id", stepExecution.getJobExecutionId());
    // Named-parameter values for the insert-select above.
    Map<String, ? extends Object> queryParameters = ImmutableMap.of(
            "authorityId", getAuthorityId(),
            "resourceId", resourceId,
            "jobId", stepExecution.getJobExecutionId(),
            "annotatedObjType", annotatedObjType);
    logger.debug("Annotating: {} with params {}", queryString, queryParameters);
    jdbcTemplate.update(queryString, queryParameters);
    return RepeatStatus.FINISHED;
}
/**
 * Deletes every record type associated with the given resource from both the
 * database and the Solr index.
 *
 * Fixes: drops the redundant (String) cast on getString() (which already
 * returns a String) and returns RepeatStatus.FINISHED explicitly instead of
 * null (Spring Batch treats null as finished, but the explicit status is
 * clearer and matches the sibling tasklets).
 */
@Override
public RepeatStatus execute(StepContribution arg0, ChunkContext chunkcontext) throws Exception {
    String resourceId = chunkcontext.getStepContext().getStepExecution()
            .getJobExecution().getJobParameters().getString("resource_id");
    taxonDeleter.Delete(sessionFactory, Taxon.class, resourceId, "Taxon", solrClient);
    descriptionDeleter.Delete(sessionFactory, Description.class, resourceId, "Description", solrClient);
    conceptDeleter.Delete(sessionFactory, Concept.class, resourceId, "Concept", solrClient);
    distributionDeleter.Delete(sessionFactory, Distribution.class, resourceId, "Distribution", solrClient);
    identifierDeleter.Delete(sessionFactory, Identifier.class, resourceId, "Identifier", solrClient);
    imageDeleter.Delete(sessionFactory, Image.class, resourceId, "Image", solrClient);
    measurementorFactDeleter.Delete(sessionFactory, MeasurementOrFact.class, resourceId, "MeasurementOrFact", solrClient);
    referenceDeleter.Delete(sessionFactory, Reference.class, resourceId, "Reference", solrClient);
    typeandspecimenDeleter.Delete(sessionFactory, TypeAndSpecimen.class, resourceId, "TypeAndSpecimen", solrClient);
    vernacularnameDeleter.Delete(sessionFactory, VernacularName.class, resourceId, "VernacularName", solrClient);
    return RepeatStatus.FINISHED;
}
/**
 * Checks the segment count of the Solr core and triggers an optimize only
 * when it has at least the configured maximum number of segments.
 */
@SuppressWarnings("unchecked")
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    CoreAdminResponse coreStatus = CoreAdminRequest.getStatus(core, solrClient);
    NamedList<Object> indexInfo = (NamedList<Object>) coreStatus.getCoreStatus(core).get("index");
    Integer segments = (Integer) indexInfo.get("segmentCount");
    if (segments < maxSegments) {
        logger.debug("Core " + core + " only has " + segments + " segments, skipping optimization");
    } else {
        logger.debug("Core " + core + " has " + segments + " segments, starting optimization");
        solrClient.optimize(true, true);
        logger.debug("Core " + core + " optimized");
    }
    return RepeatStatus.FINISHED;
}
@Override public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception { try { final int[] ret = this.transactionTemplate.execute(new TransactionCallback<int[]>() { @Override public int[] doInTransaction(final TransactionStatus status) { return doTask(chunkContext); } }); } catch (final TransactionException e) { log.error(e.getMessage(), e); throw e;//FAIL job status } return RepeatStatus.FINISHED; }
/**
 * Execute the {@link #setSql(String) SQL query} provided. If the query starts with "select" (case insensitive) the
 * result is a list of maps, which is logged and added to the step execution exit status. Otherwise the query is
 * executed and the result is an indication, also in the exit status, of the number of rows updated.
 */
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    StepExecution stepExecution = chunkContext.getStepContext().getStepExecution();
    ExitStatus exitStatus = stepExecution.getExitStatus();

    // Prefer the inline SQL when configured; otherwise fall back to scripts.
    String description = "";
    if (StringUtils.hasText(sql)) {
        description = runCommand(chunkContext.getStepContext(), sql);
    } else if (!CollectionUtils.isEmpty(scripts)) {
        description = runScripts(chunkContext, scripts, null);
    }

    stepExecution.setExitStatus(exitStatus.addExitDescription(description));
    return RepeatStatus.FINISHED;
}
@Test
public void testIgnoreEmptyFile() throws Exception {
    // An empty origin file: with ignoreEmptyFile set the copy must be a no-op.
    File emptySource = File.createTempFile("input-copy-", ".csv", new File("target/"));

    FileCopyTasklet tasklet = new FileCopyTasklet();
    tasklet.setIgnoreEmptyFile(true);
    tasklet.setOrigin(new FileSystemResource(emptySource));
    tasklet.setDestination(new FileSystemResource("target/input-copyEmpty.csv"));

    StepContribution contribution = mock(StepContribution.class);
    tasklet.execute(contribution, null);

    // Nothing read or written, no destination created, origin left untouched.
    verify(contribution, times(0)).incrementReadCount();
    verify(contribution, times(0)).incrementWriteCount(1);
    assertFalse(tasklet.getDestination().getFile().exists());
    assertTrue(tasklet.getOrigin().getFile().exists());
    assertEquals(0, tasklet.getOrigin().getFile().length());
}
@Test
public void testExecuteRemove() throws Exception {
    FileUtils.copyFile(new File("src/test/resources/testFiles/input.csv"),
            new File("target/CP-input.csv"));

    // A resource factory that hands back the single copied file.
    ResourcesFactory sourceFactory = mock(ResourcesFactory.class);
    Resource fileResource = mock(Resource.class);
    when(fileResource.getFile()).thenReturn(new File("target/CP-input.csv"));
    when(fileResource.exists()).thenReturn(true);
    when(sourceFactory.getResources(anyMapOf(Object.class, Object.class)))
            .thenReturn(new Resource[] { fileResource });

    FilesOperationTasklet tasklet = new FilesOperationTasklet();
    tasklet.setSourceFactory(sourceFactory);
    tasklet.setOperation(Operation.REMOVE);
    tasklet.afterPropertiesSet();

    StepContribution contribution = mock(StepContribution.class);
    assertEquals(RepeatStatus.FINISHED, tasklet.execute(contribution, null));

    // One file read and removed; contribution counters reflect the removal.
    verify(contribution, times(1)).incrementReadCount();
    verify(contribution, times(1)).incrementWriteCount(1);
    assertFalse(fileResource.getFile().exists());
}
/**
 * Generates a rollup report for every snapshot window that has not yet been
 * executed.
 */
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    Long executeTime = (Long) chunkContext.getStepContext().getJobParameters().get("executeTime");
    log.debug("********************** Snapshot Generation Run ************************");
    log.debug("Execute time: " + new Date(executeTime).toString());
    log.debug("*****************************************************************************");

    for (SnapshotWindowDO window : snapshotWindowDao.findUnexecutedSnapshots()) {
        reportsService.createSnapshotRollup(window.getSnapshotWindowId(), true);
    }

    log.debug("********************** Snapshot Generation Run --Complete-- ************************");
    log.debug("Finish time: " + new Date(System.currentTimeMillis()).toString());
    log.debug("*****************************************************************************");
    return RepeatStatus.FINISHED;
}
/**
 * Prints a greeting plus a formatted dump of any job parameters the step was
 * launched with.
 */
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    JobParameters jobParameters = chunkContext.getStepContext().getStepExecution().getJobParameters();
    System.out.println("Hello Spring Batch!");

    if (jobParameters != null && !jobParameters.isEmpty()) {
        final Set<Entry<String, JobParameter>> parameterEntries = jobParameters.getParameters().entrySet();
        System.out.println(LINE);
        System.out.println(String.format("The following %s Job Parameter(s) is/are present:", parameterEntries.size()));
        System.out.println(LINE);
        for (Entry<String, JobParameter> entry : parameterEntries) {
            // Pull the parameter out once rather than calling getValue() repeatedly.
            JobParameter parameter = entry.getValue();
            System.out.println(String.format(
                    "Parameter name: %s; isIdentifying: %s; type: %s; value: %s",
                    entry.getKey(),
                    parameter.isIdentifying(),
                    parameter.getType().toString(),
                    parameter.getValue()));
        }
        System.out.println(LINE);
    }
    return RepeatStatus.FINISHED;
}
@Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { RetryTemplate retryTemplate = new RetryTemplate(); SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(); retryPolicy.setMaxAttempts(3); retryTemplate.setRetryPolicy(retryPolicy); // HINT: 재시도 정책을 지정해서 3번까지 재시도를 수행합니다. // List<Discount> discounts = retryTemplate.execute(new RetryCallback<List<Discount>>() { @Override public List<Discount> doWithRetry(RetryContext context) throws Exception { return discountService.getDiscounts(); } }); discountsHolder.setDiscounts(discounts); return RepeatStatus.FINISHED; }
/**
 * Builds a no-op tasklet step with the given name, attaching the supplied
 * listener and the shared transaction attribute.
 */
private Step createTaskletStepWithListener(final String taskName, StepExecutionListener stepExecutionListener) {
    return this.steps.get(taskName)
            .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED)
            .transactionAttribute(getTransactionAttribute())
            .listener(stepExecutionListener)
            .build();
}
/**
 * Builds a no-op tasklet step with the given name using the shared
 * transaction attribute.
 */
private Step createTaskletStep(final String taskName) {
    return this.steps.get(taskName)
            .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED)
            .transactionAttribute(getTransactionAttribute())
            .build();
}