/**
 * Launches the "PalmWeb" job with empty parameters and verifies it completes,
 * logging per-step read/filter/write/commit counts.
 *
 * @throws NoSuchJobException if the PalmWeb job cannot be located
 * @throws IOException if a temporary file cannot be created
 */
@Test
public void testCreateGenericArchive() throws NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException, IOException {
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    JobParameters jobParameters = new JobParameters(parameters);

    Job palmwebArchive = jobLocator.getJob("PalmWeb");
    assertNotNull("Palmweb must not be null", palmwebArchive);

    JobExecution jobExecution = jobLauncher.run(palmwebArchive, jobParameters);
    // JUnit's assertEquals is (message, expected, actual); the original swapped
    // expected and actual, which produces a misleading failure message.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount() + " "
                + stepExecution.getCommitCount());
    }
}
public void start() throws IOException, InterruptedException { List<JobExecution> jobExecutions = new ArrayList<>(); // launch jobs jobExecutions.addAll(IntStream.range(0, this.cardinality).mapToObj(i -> { Job analysisJob = this.jobFactory.get(); JobParametersBuilder jobParametersBuilder = new JobParametersBuilder(); jobParametersBuilder.addString("id", analysisJob.getName() + "-" + i, true); try { return this.jobLauncher.run(analysisJob, jobParametersBuilder.toJobParameters()); } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException | JobParametersInvalidException exception) { throw new RuntimeException(exception); } }).collect(Collectors.toList())); // wait for termination while (jobExecutions.stream().anyMatch(jobExecution -> jobExecution.getStatus().isRunning())) { Thread.sleep(1000); } }
@RequestMapping("/launch") public String launch() throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException { //JobParametersの内容を変更しないと同一のジョブ実行と思われるっぽい。 //同一のジョブ実行だと思われたら二回目からは実行されない。 //(一回目の実行が既に完了しているので) //とりあえずIDっぽいものを持たせて実行の要求の度にインクリメントすることで //何度も実行できるようになった。 //cf. JobParametersIncrementer JobParameters jobParameters = new JobParametersBuilder().addLong( "simpleBatchId", idGenerator.getAndIncrement()) .toJobParameters(); JobExecution execution = launcher.run(job, jobParameters); return execution.toString(); }
/**
 * Resolves the requested job, converts the request's string parameters into
 * JobParameters and launches it; any launch failure is reported through the
 * jobStatusNotifier keyed by the "resource.identifier" request parameter.
 *
 * @param request the job name and string parameters to launch with
 */
@Override
public void launch(JobLaunchRequest request) {
    try {
        Job job = jobLocator.getJob(request.getJob());
        Map<String, JobParameter> jobParameterMap = new HashMap<String, JobParameter>();
        for (String parameterName : request.getParameters().keySet()) {
            jobParameterMap.put(parameterName, new JobParameter(request.getParameters().get(parameterName)));
        }
        JobParameters jobParameters = new JobParameters(jobParameterMap);
        jobLauncher.run(job, jobParameters);
    // Multi-catch replaces four duplicated catch blocks that all did the same
    // thing, matching the sibling launcher implementation in this codebase.
    // NOTE(review): only the localized message survives — the cause is dropped,
    // as in the original; consider passing the exception as a cause.
    } catch (NoSuchJobException | JobExecutionAlreadyRunningException | JobRestartException
            | JobInstanceAlreadyCompleteException | JobParametersInvalidException exception) {
        jobStatusNotifier.notify(new JobExecutionException(exception.getLocalizedMessage()),
                request.getParameters().get("resource.identifier"));
    }
}
/**
 * Runs the "ImageProcessing" job over all images and verifies it completes,
 * logging per-step read/filter/write counts.
 *
 * @throws IOException if a temporary file cannot be created.
 * @throws NoSuchJobException if the job cannot be located
 * @throws JobParametersInvalidException if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException if the job has already completed
 * @throws JobRestartException if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("query.string", new JobParameter("select i from Image i"));
    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator.getJob("ImageProcessing");
    assertNotNull("ImageProcessing must not be null", job);

    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount());
    }
}
/**
 * Looks up the requested job, maps the request's string parameters to
 * JobParameters and launches it; launch failures are forwarded to the
 * jobStatusNotifier keyed by the "job.configuration.id" request parameter.
 *
 * @param request the job name and string parameters to launch with
 */
@Override
public void launch(JobLaunchRequest request) {
    try {
        Job job = jobLocator.getJob(request.getJob());
        Map<String, JobParameter> jobParameterMap = new HashMap<String, JobParameter>();
        request.getParameters().keySet().forEach(parameterName ->
                jobParameterMap.put(parameterName,
                        new JobParameter(request.getParameters().get(parameterName))));
        jobLauncher.run(job, new JobParameters(jobParameterMap));
    } catch (NoSuchJobException | JobExecutionAlreadyRunningException | JobRestartException
            | JobInstanceAlreadyCompleteException | JobParametersInvalidException exception) {
        jobStatusNotifier.notify(new JobExecutionException(exception.getLocalizedMessage()),
                request.getParameters().get("job.configuration.id"));
    }
}
/**
 * Launches the given job right away on a throwaway SimpleJobLauncher bound to
 * the supplied task executor, wrapping any Spring Batch launch exception in the
 * application's unchecked YonaException.
 *
 * @return the resulting job execution
 */
private JobExecution launchImmediately(TaskExecutor taskExecutor, Job job, JobParameters jobParameters) {
    SimpleJobLauncher launcher = new SimpleJobLauncher();
    launcher.setJobRepository(jobRepository);
    launcher.setTaskExecutor(taskExecutor);
    try {
        return launcher.run(job, jobParameters);
    } catch (JobExecutionAlreadyRunningException | JobRestartException
            | JobInstanceAlreadyCompleteException | JobParametersInvalidException e) {
        logger.error("Unexpected exception", e);
        throw YonaException.unexpected(e);
    }
}
/**
 * Maps a known exception type to a short, safe label; any other exception falls
 * through to its own message.
 *
 * <p>Checks are ordered most-specific first — JsonProcessingException extends
 * IOException, so reordering would change which label is returned.
 *
 * @param exception the exception to classify
 * @return a whitelisted label, or the exception's message if unrecognised
 */
public static String whitelist(Exception exception) {
    if (exception instanceof JobInstanceAlreadyCompleteException) {
        return "Job instance already complete exception";
    }
    if (exception instanceof JsonProcessingException) {
        return "Json processing exception";
    }
    if (exception instanceof IOException) {
        return "IO exception";
    }
    if (exception instanceof TransactionException) {
        return "Transaction exception";
    }
    if (exception instanceof DataAccessException) {
        return "Data access exception";
    }
    if (exception instanceof SQLException) {
        return "SQL exception";
    }
    return exception.getMessage();
}
public static void main(String[] args) throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException, DuplicateJobException { ConfigurableApplicationContext ctx = SpringApplication.run(Application.class, args); Job simpleJob = ctx.getBean("simpleJob", Job.class); JobRegistry jobRegistry = ctx.getBean("jobRegistry", JobRegistry.class); jobRegistry.register(new ReferenceJobFactory(simpleJob)); //JobRepository jobRepository = ctx.getBean("jobRepository", JobRepository.class); //JobInstance jobInstance = jobRepository.createJobInstance("simpleJob", new JobParameters()); // JobParameters jobParameters = ctx.getBean("basicParameters", JobParameters.class); // //JobRegistry jobRegistry = ctx.getBean("mapJobRegistry", JobRegistry.class); // jobRegistry.register(); // jobLauncher.run(job, jobParameters); }
@Test public void restartable() throws Exception { when(taskletForRestartableJob.execute(any(StepContribution.class), any(ChunkContext.class))) .thenThrow(new RuntimeException()) .thenReturn(RepeatStatus.FINISHED); JobParameters jobParameters = new JobParametersBuilder().addLong("date", System.currentTimeMillis()).toJobParameters(); JobExecution exec = jobLauncher.run(restartableJob, jobParameters); assertThat(exec.getStatus()).isEqualTo(BatchStatus.FAILED); exec = jobLauncher.run(restartableJob, jobParameters); assertThat(exec.getStatus()).isEqualTo(BatchStatus.COMPLETED); try { exec = jobLauncher.run(restartableJob, jobParameters); Assert.fail("job 인스턴스는 이미 완료되었습니다. 완료된 놈을 실행시키면 JobInstanceAlreadyCompleteException이 발생해야 합니다."); } catch (JobInstanceAlreadyCompleteException e) { // OK } }
/**
 * Computes the next parameters for the job and, when available, launches it and
 * publishes a JobExecutionEvent if an event publisher is configured. Does
 * nothing when no next parameters can be determined.
 */
protected void execute(Job job, JobParameters jobParameters)
        throws JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException,
        JobParametersNotFoundException {
    JobParameters nextParameters = getNextJobParameters(job, jobParameters);
    if (nextParameters == null) {
        return;
    }
    JobExecution execution = this.jobLauncher.run(job, nextParameters);
    if (this.publisher != null) {
        this.publisher.publishEvent(new JobExecutionEvent(execution));
    }
}
/**
 * Runs the scheduled job, passing the current date as an identifying
 * "execDate" parameter so each firing creates a distinct job instance.
 *
 * @throws JobParametersInvalidException
 * @throws JobInstanceAlreadyCompleteException
 * @throws JobRestartException
 * @throws JobExecutionAlreadyRunningException
 */
@Scheduled(fixedRate = RATE)
public void run() throws JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    // Plain map replaces double-brace initialization: the anonymous HashMap
    // subclass held a hidden reference to the enclosing bean and needed a
    // serialVersionUID for no benefit.
    Map<String, JobParameter> parameterMap = new HashMap<String, JobParameter>();
    parameterMap.put("execDate", new JobParameter(new Date(), true));
    jobLauncher.run(job, new JobParameters(parameterMap));
}
/**
 * Runs the simple string-processor tasklet job with a string and a date
 * parameter and asserts that it exits with COMPLETED.
 */
@Test
public void shouldTaskletWork() throws JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Map<String, JobParameter> jobParameterMap = Maps.newHashMap();
    jobParameterMap.put("test", new JobParameter("przodownik"));
    jobParameterMap.put("time", new JobParameter(new Date()));

    JobExecution execution = jobLauncher.run(simpleStringProcessorTask, new JobParameters(jobParameterMap));
    log.info("Exit Status : {}", execution.getExitStatus());
    Assert.assertEquals(ExitStatus.COMPLETED, execution.getExitStatus());
}
/**
 * Launches job3 against the given input file. The TIMESTAMP parameter makes
 * every request a fresh job instance.
 *
 * @param inputFileName path of the input file, taken from the URL
 * @return a fixed acknowledgement string
 */
@RequestMapping("/job3/{input_file_name}")
@ResponseBody
String requestJob3(@PathVariable("input_file_name") String inputFileName)
        throws JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    JobParameters jobParameters = new JobParametersBuilder()
            .addString("INPUT_FILE_PATH", inputFileName)
            .addLong("TIMESTAMP", new Date().getTime())
            .toJobParameters();
    jobLauncher.run(job3, jobParameters);
    return "Job3!";
}
/**
 * Indexes all taxa into Solr, runs the "TaxonMatching" job over a CSV input and
 * a temporary output file, verifies completion and logs the output.
 *
 * @throws IOException if a temporary file cannot be created.
 * @throws NoSuchJobException if the TaxonMatching job cannot be located
 * @throws JobParametersInvalidException if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException if the job has already completed
 * @throws JobRestartException if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException if the job is already running
 */
@Test
public final void testMatchTaxa() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();
    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    ClassPathResource input = new ClassPathResource("/org/emonocot/job/taxonmatch/input.csv");
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("assume.accepted.matches", new JobParameter(Boolean.TRUE.toString()));
    parameters.put("input.file", new JobParameter(input.getFile().getAbsolutePath()));
    parameters.put("output.file", new JobParameter(File.createTempFile("output", "csv").getAbsolutePath()));
    JobParameters jobParameters = new JobParameters(parameters);

    Job taxonMatchingJob = jobLocator.getJob("TaxonMatching");
    assertNotNull("TaxonMatching must not be null", taxonMatchingJob);

    JobExecution jobExecution = jobLauncher.run(taxonMatchingJob, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    // try-with-resources: the reader was previously never closed (leak). The
    // vacuous assertNotNull on a freshly-constructed reader was dropped.
    try (BufferedReader reader = new BufferedReader(new FileReader(
            jobParameters.getParameters().get("output.file").getValue().toString()))) {
        String ln;
        while ((ln = reader.readLine()) != null) {
            logger.debug(ln);
        }
    }
}
/**
 * Indexes all taxa into Solr, then runs the "IUCNImport" job against the test
 * repository's iucn.json with a past last-harvested timestamp and verifies it
 * completes, logging per-step counts.
 *
 * @throws IOException if a temporary file cannot be created.
 * @throws NoSuchJobException if the IUCNImport job cannot be located
 * @throws JobParametersInvalidException if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException if the job has already completed
 * @throws JobRestartException if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();
    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter("test"));
    String repository = properties.getProperty("test.resource.baseUrl");
    parameters.put("authority.uri", new JobParameter(repository + "iucn.json"));
    parameters.put("authority.last.harvested",
            new JobParameter(Long.toString((IUCNJobIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator.getJob("IUCNImport");
    assertNotNull("IUCNImport must not be null", job);

    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount() + " "
                + stepExecution.getCommitCount());
    }
}
/**
 * Builds a Darwin Core archive from DELTA specs/items/taxon files via the
 * "DeltaToDwC" job, writing to a randomly-named archive file, and verifies the
 * job completes, logging per-step counts.
 *
 * @throws NoSuchJobException if the DeltaToDwC job cannot be located
 * @throws IOException if a temporary file cannot be created
 */
@Test
public void testCreateGenericArchive() throws NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException, IOException {
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("specs.file", new JobParameter(specsFile.getFile().getAbsolutePath()));
    parameters.put("items.file", new JobParameter(itemsFile.getFile().getAbsolutePath()));
    parameters.put("taxon.file", new JobParameter(taxonFile.getFile().getAbsolutePath()));
    parameters.put("fields.terminated.by", new JobParameter(","));
    parameters.put("fields.enclosed.by", new JobParameter("\""));
    parameters.put("output.fields", new JobParameter("taxonID,description,language,license"));
    parameters.put("taxon.file.skip.lines", new JobParameter("1"));
    parameters.put("taxon.file.field.names", new JobParameter("taxonID,subfamily,subtribe,tribe,accessRights,bibliographicCitation,created,license,modified,references,rights,rightsHolder,acceptedNameUsage,acceptedNameUsageID,class,datasetID,datasetName,family,genus,infraspecificEpithet,kingdom,nameAccordingTo,namePublishedIn,namePublishedInID,namePublishedInYear,nomenclaturalCode,nomenclaturalStatus,order,originalNameUsage,originalNameUsageID,parentNameUsage,parentNameUsageID,phylum,scientificName,scientificNameAuthorship,scientificNameID,specificEpithet,subgenus,taxonRank,taxonRemarks,taxonomicStatus,verbatimTaxonRank"));
    parameters.put("taxon.file.delimiter", new JobParameter("\t"));
    parameters.put("taxon.file.quote.character", new JobParameter("\""));
    parameters.put("description.file.field.names", new JobParameter("taxonID,description,license,language,type,rights"));
    parameters.put("description.default.values", new JobParameter("license=http://creativecommons.org/licenses/by-nc-sa/3.0,language=EN,type=general,rights=© Copyright The Board of Trustees\\, Royal Botanic Gardens\\, Kew."));
    parameters.put("archive.file", new JobParameter(UUID.randomUUID().toString()));
    JobParameters jobParameters = new JobParameters(parameters);

    Job deltaToDwC = jobLocator.getJob("DeltaToDwC");
    assertNotNull("DeltaToDwC must not be null", deltaToDwC);

    JobExecution jobExecution = jobLauncher.run(deltaToDwC, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount() + " "
                + stepExecution.getCommitCount());
    }
}
/**
 * Runs the "Grassbase" job over species and genera DELTA files plus an images
 * file, and verifies it completes, logging per-step counts.
 *
 * @throws NoSuchJobException if the Grassbase job cannot be located
 * @throws IOException if a temporary file cannot be created
 */
@Test
public void testCreateGenericArchive() throws NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException, IOException {
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("species.specs.file", new JobParameter(speciesSpecsFile.getFile().getAbsolutePath()));
    parameters.put("species.items.file", new JobParameter(speciesItemsFile.getFile().getAbsolutePath()));
    parameters.put("species.taxon.file", new JobParameter(speciesTaxonFile.getFile().getAbsolutePath()));
    parameters.put("genera.specs.file", new JobParameter(generaSpecsFile.getFile().getAbsolutePath()));
    parameters.put("genera.items.file", new JobParameter(generaItemsFile.getFile().getAbsolutePath()));
    parameters.put("genera.taxon.file", new JobParameter(generaTaxonFile.getFile().getAbsolutePath()));
    parameters.put("images.file", new JobParameter(imagesFile.getFile().getAbsolutePath()));
    JobParameters jobParameters = new JobParameters(parameters);

    Job deltaToDwC = jobLocator.getJob("Grassbase");
    assertNotNull("Grassbase must not be null", deltaToDwC);

    JobExecution jobExecution = jobLauncher.run(deltaToDwC, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount() + " "
                + stepExecution.getCommitCount());
    }
}
/** * * @throws IOException * if a temporary file cannot be created. * @throws NoSuchJobException * if SpeciesPageHarvestingJob cannot be located * @throws JobParametersInvalidException * if the job parameters are invalid * @throws JobInstanceAlreadyCompleteException * if the job has already completed * @throws JobRestartException * if the job cannot be restarted * @throws JobExecutionAlreadyRunningException * if the job is already running */ @Test public final void testNotModifiedResponse() throws IOException, NoSuchJobException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException { Session session = sessionFactory.openSession(); Transaction tx = session.beginTransaction(); List<Taxon> taxa = session.createQuery("from Taxon as taxon").list(); solrIndexingListener.indexObjects(taxa); tx.commit(); Map<String, JobParameter> parameters = new HashMap<String, JobParameter>(); parameters.put("family", new JobParameter("Araceae")); parameters.put("authority.name", new JobParameter("test")); String repository = properties.getProperty("test.resource.baseUrl", "http://build.e-monocot.org/git/?p=emonocot.git;a=blob_plain;f=emonocot-harvest/src/test/resources/org/emonocot/job/common/"); parameters.put("authority.uri", new JobParameter(repository + "list.xml")); //parameters.put("authority.uri", new JobParameter("http://data.gbif.org/ws/rest/occurrence/list?taxonconceptkey=6979&maxresults=1000&typesonly=true&format=darwin&mode=raw&startindex=")); parameters.put("authority.last.harvested", new JobParameter(Long.toString((GBIFJobIntegrationTest.PAST_DATETIME.getMillis())))); JobParameters jobParameters = new JobParameters(parameters); Job job = jobLocator.getJob("GBIFImport"); assertNotNull("GBIFImport must not be null", job); JobExecution jobExecution = jobLauncher.run(job, jobParameters); assertEquals("The job should complete successfully",jobExecution.getExitStatus().getExitCode(),"COMPLETED"); for 
(StepExecution stepExecution : jobExecution.getStepExecutions()) { logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " " + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount()); } }
/**
 * Runs the "CalculateDerivedProperties" job over taxa that have a parent name
 * usage and verifies it completes, logging per-step counts.
 *
 * @throws IOException if a temporary file cannot be created.
 * @throws NoSuchJobException if the job cannot be located
 * @throws JobParametersInvalidException if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException if the job has already completed
 * @throws JobRestartException if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("query.string", new JobParameter("select t.id from Taxon t join t.parentNameUsage"));
    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator.getJob("CalculateDerivedProperties");
    assertNotNull("CalculateDerivedProperties must not be null", job);

    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount());
    }
}
/** * * @throws IOException * if a temporary file cannot be created. * @throws NoSuchJobException * if SpeciesPageHarvestingJob cannot be located * @throws JobParametersInvalidException * if the job parameters are invalid * @throws JobInstanceAlreadyCompleteException * if the job has already completed * @throws JobRestartException * if the job cannot be restarted * @throws JobExecutionAlreadyRunningException * if the job is already running */ @Test public final void testImportTaxa() throws IOException, NoSuchJobException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException { Map<String, JobParameter> parameters = new HashMap<String, JobParameter>(); parameters.put("authority.name", new JobParameter( "test")); parameters.put("family", new JobParameter( "Araceae")); parameters.put("taxon.processing.mode", new JobParameter("IMPORT_TAXA_BY_AUTHORITY")); String repository = properties.getProperty("test.resource.baseUrl"); parameters.put("authority.uri", new JobParameter(repository + "dwc.zip")); parameters.put("authority.last.harvested", new JobParameter(Long.toString((TaxonImportingIntegrationTest.PAST_DATETIME.getMillis())))); JobParameters jobParameters = new JobParameters(parameters); Job darwinCoreArchiveHarvestingJob = jobLocator.getJob("DarwinCoreArchiveHarvesting"); assertNotNull("DarwinCoreArchiveHarvesting must not be null", darwinCoreArchiveHarvestingJob); JobExecution jobExecution = jobLauncher.run(darwinCoreArchiveHarvestingJob, jobParameters); assertEquals("The job should complete successfully","COMPLETED",jobExecution.getExitStatus().getExitCode()); for (StepExecution stepExecution : jobExecution.getStepExecutions()) { logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " " + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount()); } //Test namePublishedIn is saved // assertNotNull("The namePublishedIn should have been saved.", // 
referenceService.find("urn:example.com:test:ref:numerouno")); }
/**
 * Runs the "DarwinCoreArchiveHarvesting" job with key and taxon import modes
 * enabled against a fixed test archive, and verifies it completes, logging
 * per-step counts.
 *
 * @throws IOException if a temporary file cannot be created.
 * @throws NoSuchJobException if the job cannot be located
 * @throws JobParametersInvalidException if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException if the job has already completed
 * @throws JobRestartException if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException if the job is already running
 */
@Test
public final void testImportTaxa() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter("test"));
    parameters.put("family", new JobParameter("Araceae"));
    parameters.put("key.processing.mode", new JobParameter("IMPORT_KEYS"));
    parameters.put("taxon.processing.mode", new JobParameter("IMPORT_TAXA_BY_AUTHORITY"));
    parameters.put("authority.uri", new JobParameter("http://build.e-monocot.org/oldtest/test.zip"));
    parameters.put("authority.last.harvested",
            new JobParameter(Long.toString((IdentificationKeyImportingIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job darwinCoreArchiveHarvestingJob = jobLocator.getJob("DarwinCoreArchiveHarvesting");
    assertNotNull("DarwinCoreArchiveHarvesting must not be null", darwinCoreArchiveHarvestingJob);

    JobExecution jobExecution = jobLauncher.run(darwinCoreArchiveHarvestingJob, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount());
    }
}
/**
 * Indexes all taxa into Solr, runs the "PhylogeneticTreeHarvesting" job against
 * a Newick test file, verifies it completes, then logs per-step counts and any
 * annotations recorded by the harvest.
 *
 * @throws IOException if a temporary file cannot be created.
 * @throws NoSuchJobException if the job cannot be located
 * @throws JobParametersInvalidException if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException if the job has already completed
 * @throws JobRestartException if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    // NOTE(review): the session is intentionally left open, matching the other
    // tests in this suite, because annotations are queried from it after the run.
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();
    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter("test"));
    parameters.put("input.file.extension", new JobParameter("nwk"));
    parameters.put("root.taxon.identifier", new JobParameter("urn:kew.org:wcs:taxon:16026"));
    String repository = properties.getProperty("test.resource.baseUrl");
    parameters.put("authority.uri", new JobParameter(repository + "test.nwk"));
    parameters.put("authority.last.harvested",
            new JobParameter(Long.toString((PhylogeneticTreeJobIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job identificationKeyHarvestingJob = jobLocator.getJob("PhylogeneticTreeHarvesting");
    assertNotNull("PhylogeneticTreeHarvesting must not be null", identificationKeyHarvestingJob);

    JobExecution jobExecution = jobLauncher.run(identificationKeyHarvestingJob, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount());
    }

    List<Annotation> annotations = session.createQuery("from Annotation a").list();
    for (Annotation a : annotations) {
        logger.info(a.getJobId() + " " + a.getRecordType() + " " + a.getType() + " "
                + a.getCode() + " " + a.getText());
    }
}
/**
 * Runs the "ReIndex" job over all Taxon rows with a Solr facet restriction and
 * verifies it completes, logging per-step counts.
 *
 * @throws IOException if a temporary file cannot be created.
 * @throws NoSuchJobException if the ReIndex job cannot be located
 * @throws JobParametersInvalidException if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException if the job has already completed
 * @throws JobRestartException if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("query.string", new JobParameter("select t.id from Taxon t"));
    parameters.put("query.type", new JobParameter("org.emonocot.model.Taxon"));
    parameters.put("solr.selected.facets", new JobParameter("base.class_s=org.emonocot.model.Taxon"));
    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator.getJob("ReIndex");
    assertNotNull("ReIndex must not be null", job);

    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount());
    }
}
/**
 * Indexes all taxa into Solr, runs the "IdentificationKeyHarvesting" job
 * against an Xper-format key (testXperKey.xml), verifies it completes, then
 * logs per-step counts and any annotations recorded by the harvest.
 *
 * @throws IOException if a temporary file cannot be created.
 * @throws NoSuchJobException if the job cannot be located
 * @throws JobParametersInvalidException if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException if the job has already completed
 * @throws JobRestartException if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();
    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter("test"));
    parameters.put("root.taxon.identifier", new JobParameter("urn:kew.org:wcs:taxon:16026"));
    String repository = properties.getProperty("test.resource.baseUrl");
    parameters.put("authority.uri", new JobParameter(repository + "testXperKey.xml"));
    parameters.put("authority.last.harvested",
            new JobParameter(Long.toString((XperIdentificationKeyJobIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job identificationKeyHarvestingJob = jobLocator.getJob("IdentificationKeyHarvesting");
    assertNotNull("IdentificationKeyHarvesting must not be null", identificationKeyHarvestingJob);

    JobExecution jobExecution = jobLauncher.run(identificationKeyHarvestingJob, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount());
    }

    List<Annotation> annotations = session.createQuery("from Annotation a").list();
    for (Annotation a : annotations) {
        logger.info(a.getJobId() + " " + a.getRecordType() + " " + a.getType() + " "
                + a.getCode() + " " + a.getText());
    }
}
/**
 * Indexes all taxa into Solr, runs the "IdentificationKeyHarvesting" job
 * against an SDD key (testKey.xml), verifies it completes, then logs per-step
 * counts and any annotations recorded by the harvest.
 *
 * @throws IOException if a temporary file cannot be created.
 * @throws NoSuchJobException if the job cannot be located
 * @throws JobParametersInvalidException if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException if the job has already completed
 * @throws JobRestartException if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();
    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter("test"));
    parameters.put("root.taxon.identifier", new JobParameter("urn:kew.org:wcs:taxon:16026"));
    String repository = properties.getProperty("test.resource.baseUrl");
    parameters.put("authority.uri", new JobParameter(repository + "testKey.xml"));
    parameters.put("authority.last.harvested",
            new JobParameter(Long.toString((IdentificationKeyJobIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job identificationKeyHarvestingJob = jobLocator.getJob("IdentificationKeyHarvesting");
    assertNotNull("IdentificationKeyHarvesting must not be null", identificationKeyHarvestingJob);

    JobExecution jobExecution = jobLauncher.run(identificationKeyHarvestingJob, jobParameters);
    // assertEquals is (message, expected, actual); the original swapped them.
    assertEquals("The job should complete successfully", "COMPLETED",
            jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount());
    }

    List<Annotation> annotations = session.createQuery("from Annotation a").list();
    for (Annotation a : annotations) {
        logger.info(a.getJobId() + " " + a.getRecordType() + " " + a.getType() + " "
                + a.getCode() + " " + a.getText());
    }
}
@Test(expected = JobInstanceAlreadyCompleteException.class) public void throwJobInstanceAlreadyCompleteExceptionTest() throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException { JobParametersBuilder builder = new JobParametersBuilder(); builder.addLong("longValue", 123L, true); JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), builder.toJobParameters()); jobExecution.setStatus(BatchStatus.COMPLETED); jobExecution.setEndTime(new Date(6)); jobRepository.update(jobExecution); jobRepository.createJobExecution(job.getName(), builder.toJobParameters()); }
/**
 * Runs the GBIFImport job against the mock HTTP endpoint and asserts that it
 * completes successfully, logging the failure exceptions if it did not and the
 * per-step counts afterwards.
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if GBIFImport cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException, NoSuchJobException,
        JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    try {
        // Index all taxa so the import job can resolve them via Solr.
        Transaction tx = session.beginTransaction();
        List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
        solrIndexingInterceptor.indexObjects(taxa);
        tx.commit();

        Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
        parameters.put("family", new JobParameter("Araceae"));
        parameters.put("authority.name", new JobParameter("test"));
        parameters.put("authority.uri", new JobParameter(mockHttpUrl + "/list.xml"));
        parameters.put("resource.id", new JobParameter("123"));
        parameters.put("authority.last.harvested", new JobParameter(
                Long.toString((GBIFJobIntegrationTest.PAST_DATETIME.getMillis()))));
        JobParameters jobParameters = new JobParameters(parameters);

        Job job = jobLocator.getJob("GBIFImport");
        assertNotNull("GBIFImport must not be null", job);
        JobExecution jobExecution = jobLauncher.run(job, jobParameters);
        // BUG FIX: exit codes must be compared with equals(), not ==. The
        // reference comparison only matched when both sides happened to be the
        // same interned literal, so the failure log almost never fired.
        if ("FAILED".equals(jobExecution.getExitStatus().getExitCode())) {
            logger.info("Errors: " + jobExecution.getFailureExceptions());
        }
        // BUG FIX: assertEquals takes (message, expected, actual); the original
        // passed them swapped, which garbles the failure message.
        assertEquals("The job should complete successfully", "COMPLETED",
                jobExecution.getExitStatus().getExitCode());
        for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
            logger.info(stepExecution.getStepName() + " " + stepExecution.getReadCount() + " "
                    + stepExecution.getFilterCount() + " " + stepExecution.getWriteCount() + " "
                    + stepExecution.getCommitCount());
        }
    } finally {
        // BUG FIX: the session was previously leaked; always release it.
        session.close();
    }
}
@Test public void testJob() throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException, InterruptedException{ // should be zero Assert.assertEquals(0, getHits()); JobParametersBuilder pb = new JobParametersBuilder(); pb.addString("contactInfo", DIRECTORY + "contactInfo.txt"); pb.addString("biobank", DIRECTORY + "biobank.txt"); pb.addString("sampleCollection", DIRECTORY + "sampleCollection.txt"); pb.addString("study", DIRECTORY + "study.txt"); pb.addString("sample", DIRECTORY + "sample.txt"); pb.addString("map","example.mapping.properties"); //Elasticsearch config pb.addString("cluster.nodes", "localhost:9300"); pb.addString("cluster.name", "elasticsearch"); pb.addString("index.name", "sample"); JobExecution launch = jobLauncher.run(job, pb.toJobParameters()); while(launch.isRunning()){ Thread.sleep(5000); } Thread.sleep(5000); Assert.assertEquals(3, getHits()); }
@RequestMapping("/executions/{executionId}/restart") @ResponseBody public JobExecution restartExecution(@PathVariable("executionId") final long executionId) throws JobInstanceAlreadyCompleteException, NoSuchJobExecutionException, NoSuchJobException, JobRestartException, JobParametersInvalidException, JobExecutionAlreadyRunningException { final long restartedExecutionId = batchOperator.restart(executionId); return execution(restartedExecutionId); }
@Override public long restart(final long executionId) throws JobInstanceAlreadyCompleteException, NoSuchJobExecutionException, NoSuchJobException, JobRestartException, JobParametersInvalidException, JobExecutionAlreadyRunningException { final JobExecution jobExecution = execution(executionId); final String jobName = jobExecution.getJobInstance().getJobName(); final Job job = jobRegistry.getJob(jobName); final JobParameters parameters = jobExecution.getJobParameters(); LOG.info("Attempting to resume job with name={} and parameters={}", jobName, parameters); return jobLauncher.run(job, parameters).getId(); }
protected void startJobs(long year, long month) { try { JobParameters jobParameters = new JobParametersBuilder() .addLong("month", month) .addLong("year", year) .toJobParameters(); LOG.info("Running job in jobservice"); jobLauncher.run(employeeJob, jobParameters); } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException | JobParametersInvalidException e) { LOG.error("Job running failed", e); //TODO shouldn't we handle this differently? } }
@Test public void whenStarJobs_withGivenYearAndMonth_runJobWithParameters() throws JobParametersInvalidException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException { taxCalculatorJobService.runTaxCalculatorJob(new JobStartParams(YEAR, MONTH)); verify(jobLauncherMock).run(any(Job.class), jobParametersArgumentCaptor.capture()); JobParameters jobParameters = jobParametersArgumentCaptor.getValue(); assertThat(jobParameters.getLong("year")).isEqualTo(YEAR); assertThat(jobParameters.getLong("month")).isEqualTo(MONTH); }
public JobExecution restart(Long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, NoSuchJobException, JobParametersInvalidException { return simpleJobService.restart(jobExecutionId); }