/**
 * Ad-hoc database console: executes the SQL supplied in the "query" request parameter
 * and renders the resulting rows (or the error message) on the "db" view.
 *
 * SECURITY NOTE(review): the raw request parameter is executed verbatim against the
 * database — a textbook SQL-injection vector. Confirm this endpoint is restricted to
 * trusted administrators; it must never be exposed to untrusted users.
 *
 * @param request  current HTTP request; the "query" parameter holds the SQL to run
 * @param response current HTTP response (unused)
 * @return ModelAndView for the "db" view carrying "query", "result" and/or "error"
 */
@RequestMapping(method = { RequestMethod.GET, RequestMethod.POST })
protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) throws Exception {
    Map<String, Object> map = new HashMap<String, Object>();
    String query = request.getParameter("query");
    if (query != null) {
        map.put("query", query);
        try {
            List<?> result = daoHelper.getJdbcTemplate().query(query, new ColumnMapRowMapper());
            map.put("result", result);
        } catch (DataAccessException x) {
            // getRootCause(x) returns null when the exception has no cause chain,
            // which made the original .getMessage() call NPE; fall back to x itself.
            Throwable root = ExceptionUtils.getRootCause(x);
            map.put("error", (root != null ? root : x).getMessage());
        }
    }
    return new ModelAndView("db", "model", map);
}
/**
 * Decides how many seconds to wait before the job's first run.
 *
 * If the last recorded run succeeded and happened less than {@code interval} seconds
 * ago, the remainder of the interval is returned. Otherwise (no run, failed run, or a
 * lookup error) a random delay bounded by the "job_random_delay" property (default 60)
 * is used to spread job start times.
 *
 * @return seconds to wait before starting the job
 */
public int getDelay() {
    try {
        JobRunInfo lastRun = jobInfoStore.getLatestRun(this.identifier);
        if (lastRun != null && lastRun.isSucceed()) {
            Period sinceLastRun = new Period(new DateTime(lastRun.getStartTime()), DateTime.now(DateTimeZone.UTC));
            int elapsedSeconds = sinceLastRun.toStandardSeconds().getSeconds();
            if (elapsedSeconds < this.interval) {
                return (int) (this.interval - elapsedSeconds);
            }
        }
    } catch (Exception ex) {
        logger.error(ExceptionUtils.getRootCauseMessage(ex));
        logger.error(ExceptionUtils.getFullStackTrace(ex));
    }
    return random.nextInt(Configuration.getProperties().getInt("job_random_delay", 60));
}
/**
 * Fetches the latest run info for a job type from ZooKeeper.
 *
 * @param jobType job type used to build the ZooKeeper node path
 * @return the deserialized {@code JobRunInfo}, or {@code null} when the node is
 *         empty or its content cannot be parsed
 * @throws Exception on ZooKeeper access failures
 */
@Override
public JobRunInfo getLatestRun(String jobType) throws Exception {
    String path = String.format("%s/job/%s/latestrun", zkPath, jobType);
    ensureZkPathExists(path);
    byte[] data = ZkClient.getClient().getData().forPath(path);
    if (data == null || data.length == 0) {
        return null;
    }
    try {
        return mapper.readValue(IOUtils.toString(data, "UTF-8"), JobRunInfo.class);
    } catch (Exception e) {
        logger.error("Fail to read last run. Error {}", ExceptionUtils.getRootCauseMessage(e));
        return null;
    }
}
private Boolean calculateDailyInstanceCounts() { try { DateTime utcNow = DateTime.now(DateTimeZone.UTC); List<Instance> instances = cloudInstanceStore.getInstances(region); List<ReservedInstances> reservedInstances = cloudInstanceStore.getReservedInstances(region); // Generate instance counts per type per Availability zone List<EsInstanceCountRecord> instanceCountRecords = getInstanceCountRecords(instances, reservedInstances, utcNow); logger.info("Number of instance count records {}", instanceCountRecords.size()); // Insert records into soundwave store. instanceCounterStore.bulkInsert(instanceCountRecords); logger.info("Bulk insert succeeded for instance count records"); return true; } catch (Exception e) { logger.error(ExceptionUtils.getRootCauseMessage(e)); return false; } }
/**
 * Polls SQS queue attributes and reports the visible and in-flight message counts as
 * health-check metrics. Failures are logged and swallowed so the check loop keeps running.
 */
private void checkQueueLength() {
    try {
        GetQueueAttributesResult result =
                sqsClient.getQueueAttributes(queueUrl, Arrays.asList(QUEUELENGTHATTR, QUEUEINVISIBLEATTR));
        Map<String, String> attrs = result.getAttributes();
        if (attrs.containsKey(QUEUELENGTHATTR)) {
            Stats.addMetric(StatsUtil.getStatsName("healthcheck", "ec2queue_length"),
                    Integer.parseInt(attrs.get(QUEUELENGTHATTR)));
            logger.info("Ec2 queue length is {}", attrs.get(QUEUELENGTHATTR));
        }
        if (attrs.containsKey(QUEUEINVISIBLEATTR)) {
            // FIX: previously read the literal "ApproximateNumberOfMessagesNotVisible" here;
            // use the same constant as the containsKey guard so the key cannot drift.
            Stats.addMetric(StatsUtil.getStatsName("healthcheck", "ec2queue_in_processing"),
                    Integer.parseInt(attrs.get(QUEUEINVISIBLEATTR)));
            logger.info("Ec2 queue in processing length is {}", attrs.get(QUEUEINVISIBLEATTR));
        }
    } catch (Exception ex) {
        logger.warn(ExceptionUtils.getRootCauseMessage(ex));
        logger.warn(ExceptionUtils.getFullStackTrace(ex));
    }
}
/**
 * Deletes the Elasticsearch document(s) corresponding to a deleted CMS entity.
 *
 * Routing by message type:
 *  - "namespace": delegated entirely to the namespace message processor.
 *  - "cm_ci": remapped to index type "ci"; the percolator entry for the id is removed
 *    and a "ci_delete" event document is indexed, then the main "ci" document is deleted.
 *  - "cm_ci_rel": ignored (relation messages are no longer handled).
 *  - anything else: deleted directly from the index resolved by type.
 *
 * All failures are logged and swallowed — deletion is best-effort.
 *
 * @param msgType CMS message type (e.g. "namespace", "cm_ci", "cm_ci_rel")
 * @param msgId   id of the entity/document to remove
 */
private void deleteMessage(String msgType, String msgId) {
    try {
        if ("namespace".equals(msgType)) {
            nsMessageProcessor.processNSDeleteMsg(msgId);
        } else {
            if ("cm_ci".equals(msgType)) {
                // Remap the CMS type to the ES index type before the shared delete below.
                msgType = "ci";
                // relationMsgProcessor.processRelationDeleteMsg(msgId); //Delete all relation docs for given ci
                indexer.getTemplate().delete(indexer.getIndexName(), ".percolator", msgId);//TEMP code: Till ciClassName is available try to delete all ciIds from percolator type also
                // Record a tombstone event so downstream consumers see the deletion.
                JsonObject object = new JsonObject();
                object.add("timestamp", new JsonPrimitive(new Date().getTime()));
                object.add("ciId", new JsonPrimitive(msgId));
                indexer.indexEvent("ci_delete", object.toString());
            } else if ("cm_ci_rel".equals(msgType)){
                return; // no longer deal with relation messages
            }
            // Shared path: delete the document from the index resolved by (possibly remapped) type.
            indexer.getTemplate().delete(indexer.getIndexByType(msgType), msgType, msgId);
            logger.info("Deleted message with id::" + msgId + " and type::" + msgType + " from ES index:"+indexer.getIndexByType(msgType));
        }
    } catch (Exception e) {
        logger.error(">>>>>>>>Error in deleteMessage() ESMessageProcessorfor type :" + msgType+ " ::msgId :"+ msgId +"::" + ExceptionUtils.getMessage(e), e);
    }
}
/**
 * Determines whether the given throwable should cause the worker to stop.
 *
 * Stops when the component is no longer running, when the throwable itself is an
 * interruption (plain or ZooKeeper flavored), or when an InterruptedException is the
 * root cause of the chain.
 *
 * @param e throwable observed by the worker
 * @return {@code true} when processing should be aborted
 */
protected boolean isInterrupt(Throwable e) {
    if (!running) {
        return true;
    }
    if (e instanceof InterruptedException || e instanceof ZkInterruptedException) {
        return true;
    }
    return ExceptionUtils.getRootCause(e) instanceof InterruptedException;
}
/**
 * Performs the wrapped action. If it fails with an I/O or runtime exception, a GitHub
 * issue is opened on amihaiemil/comdor containing a link to the action's log and the
 * full stack trace; the exception itself is not rethrown.
 *
 * @throws IOException declared for interface compatibility (failures of the wrapped
 *         action are reported via the GitHub issue instead of propagating)
 */
@Override
public void perform() throws IOException {
    try {
        this.original.perform();
    } catch (final IOException | RuntimeException ex) {
        final Issue created = this.github.repos()
            .get(new Coordinates.Simple("amihaiemil/comdor"))
            .issues()
            .create(
                // FIX: corrected user-facing typos ("occured" -> "occurred",
                // "peforming" -> "performing").
                "Exception occurred while performing an Action!",
                String.format(
                    "@amihaiemil Something went wrong, please have a look."
                    + "\n\n[Here](%s) are the logs of the Action.",
                    this.original.log().location()
                ) + "\n\nHere is the exception:\n\n```\n\n"
                + ExceptionUtils.getStackTrace(ex)
                + "\n\n```"
            );
        this.original.log().logger().info(
            "Opened Issue https://github.com/amihaiemil/comdor/issues/"
            + created.number()
        );
    }
}
/** * Haal een lijst van items op. * @param bijhoudingsautorisatieId bijhoudingsautorisatie ID * @param parameters request parameters * @param pageable paginering * @return lijst van item (inclusief paginering en sortering) */ @RequestMapping(value = "/{id}/bijhoudingsautorisatieSoortAdministratieveHandelingen", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) public final Page<BijhoudingsautorisatieSoortAdministratieveHandelingView> listSoortAdministratieveHandelingen( @PathVariable("id") final Integer bijhoudingsautorisatieId, @RequestParam final Map<String, String> parameters, @PageableDefault(size = 1000) final Pageable pageable) { return getReadonlyTransactionTemplate().execute(status -> { try { final Bijhoudingsautorisatie bijhoudingsautorisatie = get(bijhoudingsautorisatieId); // Aangezien de page die we terugkrijgen uit de repository immutable is, maken we een nieuwe lijst aan om // vervolgens een nieuw page object aan te maken met de betreffende subset van de lijst. final List<BijhoudingsautorisatieSoortAdministratieveHandelingView> schermSoorten = bepaalActiefStatusSoortAdministratieveHandelingen(bijhoudingsautorisatie); final int fromIndex = pageable.getOffset(); final int toIndex = (fromIndex + pageable.getPageSize()) > schermSoorten.size() ? schermSoorten.size() : fromIndex + pageable.getPageSize(); return new PageImpl<>(schermSoorten.subList(fromIndex, toIndex), pageable, schermSoorten.size()); } catch (NotFoundException exception) { LOG.error(ExceptionUtils.getFullStackTrace(exception)); return null; } }); }
/**
 * Loads FST-serialized target-library match results for the given data file and library.
 *
 * @param Filename source data file; serialized results are expected alongside it
 * @param LibID    library identifier embedded in the serialized file name
 * @return the deserialized {@code TargetMatchScoring}, or {@code null} when the file is
 *         missing or unreadable
 * @throws FileNotFoundException kept for signature compatibility; a missing file returns null
 */
public static TargetMatchScoring LibraryMatchRead(String Filename, String LibID) throws FileNotFoundException {
    // Build the serialized-result path once instead of re-concatenating it four times.
    final String serPath = FilenameUtils.getFullPath(Filename) + FilenameUtils.getBaseName(Filename)
            + "_" + LibID + "_LibMatch.serFS";
    if (!new File(serPath).exists()) {
        return null;
    }
    TargetMatchScoring match = null;
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().info("Loading Target library match results to file:" + serPath + "...");
        FSTObjectInput in = new FSTObjectInput(fileIn);
        match = (TargetMatchScoring) in.readObject();
        in.close();
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return null;
    }
    return match;
}
/**
 * Loads Java-serialized target-library match results for the given data file and library.
 *
 * @param Filename source data file; serialized results are expected alongside it
 * @param LibID    library identifier embedded in the serialized file name
 * @return the deserialized {@code TargetMatchScoring}, or {@code null} when the file is
 *         missing or unreadable
 * @throws FileNotFoundException kept for signature compatibility; a missing file returns null
 */
public static TargetMatchScoring LibraryMatchReadJS(String Filename, String LibID) throws FileNotFoundException {
    // BUG FIX: the existence check previously looked for the FST file ("..._LibMatch.serFS")
    // while the reader opens the Java-serialization file ("..._LibMatch.ser"); both now use
    // the same path, so a present .ser file is no longer skipped.
    final String serPath = FilenameUtils.getFullPath(Filename) + FilenameUtils.getBaseName(Filename)
            + "_" + LibID + "_LibMatch.ser";
    if (!new File(serPath).exists()) {
        return null;
    }
    TargetMatchScoring match = null;
    // try-with-resources closes the stream even when deserialization throws.
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().info("Loading Target library match results to file:" + serPath + "...");
        ObjectInputStream in = new ObjectInputStream(fileIn);
        match = (TargetMatchScoring) in.readObject();
        in.close();
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return null;
    }
    return match;
}
/**
 * Checks whether any inclusion-list entry falls inside the RT/mz window around the
 * given point.
 *
 * @param rt retention time of the query point
 * @param mz m/z of the query point
 * @return {@code true} when at least one inclusion entry lies in the search window
 */
private boolean FoundInInclusionMZList(float rt, float mz) {
    if (InclusionRT.PointCount() == 0) {
        return false;
    }
    final float lowRt = rt - parameter.MaxCurveRTRange;
    final float highRt = rt + parameter.MaxCurveRTRange;
    // NOTE(review): +PPM for the low bound and -PPM for the high bound mirrors
    // GetMzByPPM's sign convention — kept exactly as in the original.
    final float lowMz = InstrumentParameter.GetMzByPPM(mz, 1, PPM);
    final float highMz = InstrumentParameter.GetMzByPPM(mz, 1, -PPM);
    Object[] hits = null;
    try {
        hits = InclusionRange.range(new double[]{lowRt, lowMz}, new double[]{highRt, highMz});
    } catch (KeySizeException ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
    }
    return hits != null && hits.length > 0;
}
/**
 * Checks whether any inclusion-list entry falls inside the mz window around {@code mz}
 * and the RT span [startrt, endrt] (widened by the RT tolerance). Every matching entry
 * is marked as found in {@code InclusionFound}.
 *
 * @param mz      m/z of the query
 * @param startrt start of the retention-time span
 * @param endrt   end of the retention-time span
 * @return {@code true} when at least one inclusion entry lies in the search window
 */
private boolean FoundInInclusionList(float mz, float startrt, float endrt) {
    if (InclusionRT.PointCount() == 0) {
        return false;
    }
    // NOTE(review): +PPM/-PPM bound ordering follows GetMzByPPM's sign convention,
    // kept exactly as in the original.
    final float lowMz = InstrumentParameter.GetMzByPPM(mz, 1, PPM);
    final float highMz = InstrumentParameter.GetMzByPPM(mz, 1, -PPM);
    final float lowRt = startrt - parameter.RTtol;
    final float highRt = endrt + parameter.RTtol;
    Object[] hits = null;
    try {
        hits = InclusionRange.range(new double[]{lowRt, lowMz}, new double[]{highRt, highMz});
    } catch (KeySizeException ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
    }
    if (hits == null || hits.length == 0) {
        return false;
    }
    // Record every matched inclusion point before reporting success.
    for (Object point : hits) {
        InclusionFound.put((XYData) point, true);
    }
    return true;
}
/**
 * Loads an FST-serialized {@code FastaParser} cached next to the given FASTA file.
 *
 * @param Filename FASTA file whose ".FastaSer" cache should be loaded
 * @return the deserialized parser, or {@code null} when the cache is missing or unreadable
 * @throws FileNotFoundException kept for signature compatibility; a missing file returns null
 */
public static FastaParser FasterSerialzationRead(String Filename) throws FileNotFoundException {
    // Build the cache path once instead of re-concatenating it four times.
    final String serPath = FilenameUtils.getFullPath(Filename) + FilenameUtils.getBaseName(Filename) + ".FastaSer";
    if (!new File(serPath).exists()) {
        return null;
    }
    FastaParser fastareader = null;
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        org.apache.log4j.Logger.getRootLogger().info("Loading fasta serialization to file:" + serPath + "..");
        FSTObjectInput in = new FSTObjectInput(fileIn);
        fastareader = (FastaParser) in.readObject();
        in.close();
    } catch (Exception ex) {
        org.apache.log4j.Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return null;
    }
    return fastareader;
}
/**
 * Parses a pepXML identification file into the supplied LCMSID container.
 *
 * If the first SAX parse fails, the file is patched via insert_msms_run_summary
 * (presumably repairing a missing/truncated msms_run_summary element — TODO confirm)
 * and parsed a second time; a failure of the retry propagates to the caller.
 *
 * @param singleLCMSID    container receiving the parsed identifications
 * @param FileName        path of the pepXML file to parse
 * @param threshold       score threshold applied during parsing
 * @param CorrectMassDiff whether to correct mass differences during parsing
 * @throws ParserConfigurationException on SAX parser setup failure
 * @throws SAXException on unrecoverable XML parse errors
 * @throws IOException on file access errors
 * @throws XmlPullParserException on pull-parser errors
 */
public PepXMLParser(LCMSID singleLCMSID, String FileName, float threshold, boolean CorrectMassDiff) throws ParserConfigurationException, SAXException, IOException, XmlPullParserException {
    this.singleLCMSID = singleLCMSID;
    this.CorrectMassDiff = CorrectMassDiff;
    this.FileName = FileName;
    this.threshold = threshold;
    Logger.getRootLogger().info("Parsing pepXML: " + FileName + "....");
    try {
        ParseSAX();
    } catch (Exception e) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e));
        Logger.getRootLogger().info("Parsing pepXML: " + FileName + " failed. Trying to fix the file...");
        // Attempt to repair the file, then retry once; a second failure propagates.
        insert_msms_run_summary(new File(FileName));
        ParseSAX();
    }
    //System.out.print("done\n");
}
/**
 * Loads an FST-serialized spectral library from {@code path + LibID1 + ".serFS"}.
 *
 * @param path   directory (or prefix) of the serialized library
 * @param LibID1 library identifier forming the file name
 * @return the deserialized {@code FragmentLibManager}, or {@code null} when the file is
 *         missing or unreadable
 */
private static FragmentLibManager FSFragmentLibRead(String path, String LibID1) {
    final String serPath = path + LibID1 + ".serFS";
    if (!new File(serPath).exists()) {
        // FIX: corrected log typo "exsit" -> "exist".
        Logger.getRootLogger().debug(serPath + " does not exist.");
        return null;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().info("Reading spectral library from file:" + serPath + "...");
        FSTObjectInput in = new FSTObjectInput(fileIn);
        FragmentLibManager FragLib = (FragmentLibManager) in.readObject();
        in.close();
        return FragLib;
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return null;
    }
}
/**
 * Loads a spectral library serialized with the legacy FST version
 * ({@code org.nustaq_old}); used as a fallback for files written before the FST upgrade.
 *
 * @param path   directory (or prefix) of the serialized library
 * @param LibID1 library identifier forming the file name
 * @return the deserialized {@code FragmentLibManager}, or {@code null} when the file is
 *         missing or unreadable
 */
private static FragmentLibManager FSFragmentLibRead_Old(String path, String LibID1) {
    final String serPath = path + LibID1 + ".serFS";
    if (!new File(serPath).exists()) {
        // FIX: corrected log typo "exsit" -> "exist".
        Logger.getRootLogger().debug(serPath + " does not exist.");
        return null;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().info("Reading internal spectral library from file:" + serPath + "...");
        org.nustaq_old.serialization.FSTObjectInput in = new org.nustaq_old.serialization.FSTObjectInput(fileIn);
        FragmentLibManager FragLib = (FragmentLibManager) in.readObject();
        in.close();
        return FragLib;
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return null;
    }
}
/**
 * Loads the Java-serialized PeakCluster list from the "_Peak" cache directory into
 * {@code PeakClusters}.
 *
 * @return {@code true} when the cache file exists and deserializes cleanly
 */
private boolean JavaSerializationPeakClusterRead() {
    // Build the cache path once instead of re-concatenating it twice.
    final String serPath = FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
            + "_Peak/" + FilenameUtils.getBaseName(ScanCollectionName) + "_PeakCluster.ser";
    if (!new File(serPath).exists()) {
        return false;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().info("Reading PeakCluster serialization from file:" + FilenameUtils.getBaseName(ScanCollectionName) + "_PeakCluster.ser...");
        ObjectInputStream in = new ObjectInputStream(fileIn);
        PeakClusters = (ArrayList<PeakCluster>) in.readObject();
        in.close();
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return false;
    }
    return true;
}
/**
 * Loads the FST-serialized PeakCluster list into {@code PeakClusters}. On a read
 * failure, falls back to the legacy FST reader and, if that succeeds, re-writes the
 * cache in the current format.
 *
 * @return {@code true} when the clusters were loaded (from either format)
 */
private boolean FS_PeakClusterRead() {
    // Build the cache path once instead of re-concatenating it twice.
    final String serPath = FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
            + "_Peak/" + FilenameUtils.getBaseName(ScanCollectionName) + "_PeakCluster.serFS";
    if (!new File(serPath).exists()) {
        return false;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().info("Reading PeakCluster serialization from file:" + FilenameUtils.getBaseName(ScanCollectionName) + "_PeakCluster.serFS...");
        FSTObjectInput in = new FSTObjectInput(fileIn);
        PeakClusters = (ArrayList<PeakCluster>) in.readObject();
        in.close();
    } catch (Exception ex) {
        // FIX: log the stack trace once (the original logged the same trace twice on
        // the total-failure path), then try the legacy format and upgrade on success.
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        if (FS_PeakClusterRead_Old()) {
            WritePeakClusterSerialization();
            return true;
        }
        return false;
    }
    return true;
}
/**
 * Loads the PeakCluster list using the legacy FST version ({@code org.nustaq_old});
 * fallback for caches written before the FST upgrade.
 *
 * @return {@code true} when the cache file exists and deserializes cleanly
 */
private boolean FS_PeakClusterRead_Old() {
    // Build the cache path once instead of re-concatenating it twice.
    final String serPath = FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
            + "_Peak/" + FilenameUtils.getBaseName(ScanCollectionName) + "_PeakCluster.serFS";
    if (!new File(serPath).exists()) {
        return false;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().info("Old PeakCluster serialization from file:" + FilenameUtils.getBaseName(ScanCollectionName) + "_PeakCluster.serFS...");
        org.nustaq_old.serialization.FSTObjectInput in = new org.nustaq_old.serialization.FSTObjectInput(fileIn);
        PeakClusters = (ArrayList<PeakCluster>) in.readObject();
        in.close();
    } catch (Exception ex) {
        Logger.getRootLogger().error("Old version reader still failed.");
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return false;
    }
    return true;
}
/**
 * Loads the FST-serialized precursor-fragment correlation map into
 * {@code FragmentsClu2Cur}.
 *
 * @return {@code true} when the cache file exists and deserializes cleanly
 */
private boolean FSCluster2CurveRead() {
    // Build the cache path once instead of re-concatenating it twice.
    final String serPath = FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
            + "_Peak/" + FilenameUtils.getBaseName(ScanCollectionName) + "_Clus2Cur.serFS";
    if (!new File(serPath).exists()) {
        return false;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().debug("Reading PrecursorFragmentCorr serialization from file:" + FilenameUtils.getBaseName(ScanCollectionName) + "_Clus2Cur.serFS...");
        FSTObjectInput in = new FSTObjectInput(fileIn);
        FragmentsClu2Cur = (HashMap<Integer, ArrayList<PrecursorFragmentPairEdge>>) in.readObject();
        in.close();
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return false;
    }
    return true;
}
/**
 * Loads the Java-serialized precursor-fragment correlation map into
 * {@code FragmentsClu2Cur}.
 *
 * @return {@code true} when the cache file exists and deserializes cleanly
 */
private boolean JavaSerializationCluster2CurveRead() {
    // Build the cache path once instead of re-concatenating it twice.
    final String serPath = FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
            + "_Peak/" + FilenameUtils.getBaseName(ScanCollectionName) + "_Clus2Cur.ser";
    if (!new File(serPath).exists()) {
        return false;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().debug("Reading PrecursorFragmentCorr serialization from file:" + FilenameUtils.getBaseName(ScanCollectionName) + "_Clus2Cur.ser...");
        ObjectInputStream in = new ObjectInputStream(fileIn);
        FragmentsClu2Cur = (HashMap<Integer, ArrayList<PrecursorFragmentPairEdge>>) in.readObject();
        in.close();
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return false;
    }
    return true;
}
/**
 * Loads the FST-serialized unfragmented-precursor correlation map into
 * {@code UnFragIonClu2Cur}.
 *
 * @return {@code true} when the cache file exists and deserializes cleanly
 */
private boolean FSCluster2CurveUnfragRead() {
    // Build the cache path once instead of re-concatenating it twice.
    final String serPath = FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
            + "_Peak/" + FilenameUtils.getBaseName(ScanCollectionName) + "_UnfClus2Cur.serFS";
    if (!new File(serPath).exists()) {
        return false;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().debug("Reading UnfragPrecursorFragCorr serialization from file:" + FilenameUtils.getBaseName(ScanCollectionName) + "_UnfClus2Cur.serFS...");
        FSTObjectInput in = new FSTObjectInput(fileIn);
        UnFragIonClu2Cur = (HashMap<Integer, ArrayList<PrecursorFragmentPairEdge>>) in.readObject();
        in.close();
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return false;
    }
    return true;
}
/**
 * Loads the Java-serialized unfragmented-precursor correlation map into
 * {@code UnFragIonClu2Cur}.
 *
 * @return {@code true} when the cache file exists and deserializes cleanly
 */
private boolean JavaSerializationCluster2CurveUnfragRead() {
    // Build the cache path once instead of re-concatenating it twice.
    final String serPath = FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
            + "_Peak/" + FilenameUtils.getBaseName(ScanCollectionName) + "_UnfClus2Cur.ser";
    if (!new File(serPath).exists()) {
        return false;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().debug("Reading UnfragPrecursorFragCorr serialization from file:" + FilenameUtils.getBaseName(ScanCollectionName) + "_UnfClus2Cur.ser...");
        ObjectInputStream in = new ObjectInputStream(fileIn);
        UnFragIonClu2Cur = (HashMap<Integer, ArrayList<PrecursorFragmentPairEdge>>) in.readObject();
        in.close();
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return false;
    }
    return true;
}
/**
 * Loads the Java-serialized DIA settings stored next to the given data file.
 *
 * @param filepath data file whose "_diasetting.ser" companion should be read
 * @return the deserialized {@code DIA_Setting}, or {@code null} when the file is
 *         missing or unreadable
 */
public static DIA_Setting ReadDIASettingSerialization(String filepath) {
    // Build the settings path once instead of re-concatenating it four times.
    final String serPath = FilenameUtils.getFullPath(filepath) + FilenameUtils.getBaseName(filepath) + "_diasetting.ser";
    if (!new File(serPath).exists()) {
        return null;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().debug("Reading DIA setting from file:" + serPath + "...");
        ObjectInputStream in = new ObjectInputStream(fileIn);
        DIA_Setting setting = (DIA_Setting) in.readObject();
        in.close();
        return setting;
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return null;
    }
}
/**
 * Loads the Java-serialized instrument parameters stored next to the given data file.
 *
 * @param filepath data file whose "_params.ser" companion should be read
 * @return the deserialized {@code InstrumentParameter}, or {@code null} when the file
 *         is missing or unreadable
 */
public static InstrumentParameter ReadParametersSerialization(String filepath) {
    // Build the parameter path once instead of re-concatenating it four times.
    final String serPath = FilenameUtils.getFullPath(filepath) + FilenameUtils.getBaseName(filepath) + "_params.ser";
    if (!new File(serPath).exists()) {
        return null;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().info("Reading parameters from file:" + serPath + "...");
        ObjectInputStream in = new ObjectInputStream(fileIn);
        InstrumentParameter params = (InstrumentParameter) in.readObject();
        in.close();
        return params;
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return null;
    }
}
/**
 * Creates a copy of this protein entry: sequence (when present), description, mass,
 * accession number and UniProt id, with this accession added to the copy's
 * indistinguishable-protein list.
 *
 * @return the copied {@code ProtID}
 */
public ProtID CloneProtein() {
    ProtID copy = new ProtID();
    try {
        if (Sequence == null) {
            Logger.getRootLogger().error("Sequence of protein:" + getAccNo() + " is null");
        } else {
            copy.SetSequence(Sequence);
        }
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
    }
    copy.Description = Description;
    copy.Mass = Mass;
    copy.setAccNo(getAccNo());
    copy.UniProtID = UniProtID;
    copy.IndisProteins.add(getAccNo());
    return copy;
}
/**
 * Writes this LCMSID (after {@code ReduceMemoryUsage()}) as an FST-serialized file
 * next to the given data file, optionally suffixed with a tag.
 *
 * @param filepath data file whose directory/base name anchor the output path
 * @param tag      optional suffix; when non-empty it is prefixed with "_"
 * @return {@code true} when the file was written successfully
 */
private boolean FSWrite(String filepath, String tag) {
    try {
        if (!tag.equals("")) {
            tag = "_" + tag;
        }
        // Build the output path once instead of re-concatenating it twice.
        final String serPath = FilenameUtils.getFullPath(filepath) + FilenameUtils.getBaseName(filepath)
                + tag + "_LCMSID.serFS";
        Logger.getRootLogger().info("Writing ID results to file:" + serPath + "...");
        // try-with-resources closes the file even when serialization throws
        // (the original leaked fout on failure).
        try (FileOutputStream fout = new FileOutputStream(serPath, false)) {
            FSTObjectOutput out = new FSTObjectOutput(fout);
            ReduceMemoryUsage();
            out.writeObject(this, LCMSID.class);
            out.close();
        }
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return false;
    }
    return true;
}
/**
 * Reads an FST-serialized LCMSID written by {@code FSWrite} for the given data file
 * and optional tag.
 *
 * @param filepath data file whose directory/base name anchor the input path
 * @param tag      optional suffix; when non-empty it is prefixed with "_"
 * @return the deserialized {@code LCMSID}, or {@code null} when the file is missing
 *         or unreadable
 * @throws Exception kept for signature compatibility (read failures return null)
 */
private static LCMSID FS_Read(String filepath, String tag) throws Exception {
    if (!tag.equals("")) {
        tag = "_" + tag;
    }
    // Build the input path once instead of re-concatenating it four times.
    final String serPath = FilenameUtils.getFullPath(filepath) + FilenameUtils.getBaseName(filepath)
            + tag + "_LCMSID.serFS";
    if (!new File(serPath).exists()) {
        return null;
    }
    // try-with-resources closes the stream even when deserialization throws
    // (the original leaked fileIn on failure).
    try (FileInputStream fileIn = new FileInputStream(serPath)) {
        Logger.getRootLogger().info("Reading ID results from file:" + serPath + "...");
        FSTObjectInput in = new FSTObjectInput(fileIn);
        LCMSID lcmsid = (LCMSID) in.readObject(LCMSID.class);
        in.close();
        return lcmsid;
    } catch (Exception ex) {
        Logger.getRootLogger().info("Reading LCMSID FS results failed.");
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
        return null;
    }
}
/**
 * Completes the audit record for the current request once the asynchronous create
 * finishes: records the HTTP status code and the stack trace of the given error, then
 * persists the audit and notifies the result. Persistence failures are logged only.
 *
 * @param throwable error whose stack trace is stored on the audit record
 */
@SneakyThrows
public void saveResponse(Throwable throwable) {
    createFuture.setHandler(res -> {
        try {
            Audit audit = res.result();
            audit.setResponseCode(context.response().getStatusCode())
                    .setError(ExceptionUtils.getStackTrace(throwable));
            persist(audit, this::notifyAuditResult);
        } catch (Exception e) {
            // FIX: include the failure cause; the original dropped it, hiding why
            // persisting the audit record failed.
            log.error("Fail to save response", e);
        }
    });
}
/** * <p>Checks if the throwable was caused by timeout exception.</p> * <b>This method has been tested for Oracle and MySQL only and might not work * for other DB engines.</b> * * @param throwable to check * @return true if the throwable is caused by a timeout, false otherwise */ public boolean isCausedByTimeoutException(Throwable throwable) { // Valid test for Oracle timeout exception and some (not all!) MySQL // exceptions. if (ExceptionUtils.indexOfType(throwable, SQLTimeoutException.class) != -1) { return true; } // MySQL database has two timeout exceptions in two packages. One of them // doesn't extend SQLTimeoutException but only SQLException. It is therefore // necessary to do ugly name check... for (Throwable causeThrowable : ExceptionUtils.getThrowables(throwable)) { if (MYSQL_TIMEOUT_EXCEPTION_NAME.equals(causeThrowable.getClass().getSimpleName())) { return true; } } return false; }
/**
 * Handles an exception thrown from a timer tick.
 *
 * A {@code java.net.ConnectException} anywhere in the chain is logged and swallowed.
 * Any other exception is rethrown, but first the chain is searched for
 * NoUserSessionException (directly, or wrapped inside a RemoteException's causes) —
 * when found the timer is stopped so it does not keep firing without a session.
 *
 * @param ex exception raised by the timer callback
 */
protected void handleTimerException(RuntimeException ex) {
    if (ExceptionUtils.indexOfType(ex, java.net.ConnectException.class) > -1) {
        // If a ConnectException occurred, just log it and ignore
        log.warn("onTimer error: " + ex.getMessage());
    } else {
        // Otherwise throw the exception, but first search for NoUserSessionException in chain,
        // if found - stop the timer
        int reIdx = ExceptionUtils.indexOfType(ex, RemoteException.class);
        if (reIdx > -1) {
            // The RemoteException wraps its causes in its own Cause list; inspect each.
            RemoteException re = (RemoteException) ExceptionUtils.getThrowableList(ex).get(reIdx);
            for (RemoteException.Cause cause : re.getCauses()) {
                //noinspection ThrowableResultOfMethodCallIgnored
                if (cause.getThrowable() instanceof NoUserSessionException) {
                    log.warn("NoUserSessionException in timer, timer will be stopped");
                    disposeTimer();
                    break;
                }
            }
        } else if (ExceptionUtils.indexOfThrowable(ex, NoUserSessionException.class) > -1) {
            log.warn("NoUserSessionException in timer, timer will be stopped");
            disposeTimer();
        }
        throw ex;
    }
}
/** * Returns the payment status that corresponds to the given error */ public PaymentStatus toStatus(final Throwable error) { if (error instanceof InvalidCredentialsException) { return PaymentStatus.INVALID_CREDENTIALS; } else if (error instanceof BlockedCredentialsException) { return PaymentStatus.BLOCKED_CREDENTIALS; } else if (error instanceof InvalidUserForChannelException) { return PaymentStatus.INVALID_CHANNEL; } else if (error instanceof NotEnoughCreditsException) { return PaymentStatus.NOT_ENOUGH_CREDITS; } else if (error instanceof UpperCreditLimitReachedException) { return PaymentStatus.RECEIVER_UPPER_CREDIT_LIMIT_REACHED; } else if (error instanceof MaxAmountPerDayExceededException) { return PaymentStatus.MAX_DAILY_AMOUNT_EXCEEDED; } else if (error instanceof IllegalArgumentException || error instanceof ValidationException || error instanceof UnexpectedEntityException || error instanceof EntityNotFoundException || error instanceof UserNotFoundException) { return PaymentStatus.INVALID_PARAMETERS; } else if (ExceptionUtils.indexOfThrowable(error, DataIntegrityViolationException.class) != -1) { return PaymentStatus.INVALID_PARAMETERS; } else { return PaymentStatus.UNKNOWN_ERROR; } }
public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { MessageResources resources = ((MessageResources) request.getAttribute(Globals.MESSAGES_KEY)); try { DetalleFuenteDatosForm detalleFuenteDatos = ( DetalleFuenteDatosForm ) form; FuenteDatosDelegate delegate = DelegateUtil.getFuenteDatosDelegate(); delegate.altaFilaFuenteDatos(detalleFuenteDatos.getIdentificador()); response.sendRedirect("detalleFuenteDatos.do?identificador=" + detalleFuenteDatos.getIdentificador()); return null; } catch (Exception ex) { String keyError = "fuenteDatos.errorPKNuevaFila"; // Controlamos si es una excepcion de PK if (ExceptionUtils.getRootCauseMessage(ex).indexOf("PK-Exception") != -1) { keyError = "fuenteDatos.errorPK"; } request.setAttribute("message",resources.getMessage( getLocale( request ), keyError)); return mapping.findForward( "fail" ); } }
/**
 * Executes an arbitrary JPQL update/delete statement inside a fresh transaction.
 *
 * @param queryString  JPQL statement to execute
 * @param softDeletion whether soft deletion is enabled on the entity manager
 * @return a summary string on success, or the failure's stack trace on error
 */
@Authenticated
@Override
public String jpqlExecuteUpdate(String queryString, boolean softDeletion) {
    try {
        Transaction tx = persistence.createTransaction();
        try {
            EntityManager em = persistence.getEntityManager();
            em.setSoftDeletion(softDeletion);
            Query query = em.createQuery(queryString);
            int affected = query.executeUpdate();
            tx.commit();
            return "Done: " + affected + " entities affected, softDeletion=" + softDeletion;
        } finally {
            // end() rolls back when commit() was not reached.
            tx.end();
        }
    } catch (Throwable e) {
        log.error("jpqlExecuteUpdate error", e);
        return ExceptionUtils.getStackTrace(e);
    }
}
private boolean processException(YuGongContext context, Table table, RecordExtractor extractor, Throwable e) { if (ExceptionUtils.getRootCause(e) instanceof InterruptedException) { // interrupt事件,响应退出 logger.info("table[{}] is interrpt ,current status:{} !", table .getFullName(), extractor.status()); return true; } else if (OracleInstance.this.isStop()) { return true; } else { logger.error("retry, something error happened. caused by {}", ExceptionUtils.getFullStackTrace(e)); logger.info("table[{}] is error , current status:{} !", table .getFullName(), extractor.status()); try { Thread.sleep(retryInterval); } catch (InterruptedException e1) { exception = new YuGongException(e1); Thread.currentThread().interrupt(); return true; } } return false; }
protected void setUploadingErrorHandler() { setErrorHandler(event -> { //noinspection ThrowableResultOfMethodCallIgnored Throwable ex = event.getThrowable(); String rootCauseMessage = ExceptionUtils.getRootCauseMessage(ex); Logger log = LoggerFactory.getLogger(CubaFileUpload.class); if (StringUtils.contains(rootCauseMessage, "The multipart stream ended unexpectedly") || StringUtils.contains(rootCauseMessage, "Unexpected EOF read on the socket")) { log.warn("Unable to upload file, it seems upload canceled or network error occurred"); } else { log.error("Unexpected error in CubaFileUpload", ex); } if (isUploading) { endUpload(); } }); }
/**
 * Logs the stack trace to the log files with the original unhandled exception and
 * stores the root-cause message on the request for the error page.
 *
 * Runtime exceptions are rethrown as-is; checked exceptions and errors are logged and
 * rethrown wrapped in a RuntimeException that preserves the original cause.
 *
 * @param request The request object
 * @param response The response object
 * @param chain The filterChain object
 * @throws IOException The java.io.IOException
 * @throws ServletException The javax.servlet.ServletException
 */
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
    try {
        logger.info(String.format("The current request thread is %s.", Thread.currentThread()));
        chain.doFilter(request, response);
    } catch (Throwable exception) {
        logger.error("There was an exception ", exception);
        String rootCauseMessage = ExceptionUtils.getRootCauseMessage(exception);
        request.setAttribute("errorMessage", rootCauseMessage);
        if (exception instanceof RuntimeException) {
            logger.error("A Runtime Exception has occurred. The cause is " + rootCauseMessage, exception);
            // FIX: rethrow the original instead of a bare new RuntimeException(),
            // which discarded the cause and stack trace for upstream handlers.
            throw (RuntimeException) exception;
        }
        if (exception instanceof Exception) {
            logger.error("An Exception has occurred. The cause is " + rootCauseMessage, exception);
        } else {
            logger.error("An Error has occurred. The cause is " + rootCauseMessage, exception);
        }
        // FIX: keep the original throwable as the cause of the wrapper.
        throw new RuntimeException(exception);
    }
}
/**
 * Resolves an EFWD connection definition from JSON form data.
 *
 * Extracts "type", "id" and "dir" from the form payload, validates them, loads the
 * EFWD file from the given directory and returns the matching connection as a JSON
 * string.
 *
 * @param formData JSON string with "type", "id" and "dir" fields
 * @return the matching EFWD connection serialized as JSON
 * @throws IncompleteFormDataException when a required field is missing or unreadable
 */
public String executeComponent(String formData) {
    JSONObject formDataJson = JSONObject.fromObject(formData);
    String type;
    String id;
    String directory;
    try {
        type = formDataJson.getString("type");
        id = formDataJson.getString("id");
        directory = formDataJson.getString("dir");
    } catch (Exception ex) {
        throw new IncompleteFormDataException(ExceptionUtils.getRootCauseMessage(ex));
    }
    // Collect the extracted values so they can be validated in one pass.
    Map<String, String> parameters = new HashMap<>();
    parameters.put("id", id);
    parameters.put("type", type);
    parameters.put("dir", directory);
    ControllerUtils.checkForNullsAndEmptyParameters(parameters);
    File efwdFile = ApplicationUtilities.getEfwdFile(directory);
    JSONObject fileAsJson = getJsonOfEfwd(efwdFile);
    JSONObject efwdConnection = getEfwdConnection(fileAsJson, id, type);
    return efwdConnection.toString();
}
/** * Inserts a TN for a transfer with the specified trace number, for the current service client * @return true if the TN was inserted */ private boolean insertTN(final Long clientId, final String traceNumber) { return transactionHelper.runInNewTransaction(new TransactionCallback<Boolean>() { @Override public Boolean doInTransaction(final TransactionStatus status) { final TraceNumber tn = new TraceNumber(); tn.setDate(Calendar.getInstance()); tn.setClientId(clientId); tn.setTraceNumber(traceNumber); try { traceNumberDao.insert(tn); return true; } catch (DaoException e) { status.setRollbackOnly(); if (ExceptionUtils.indexOfThrowable(e, DataIntegrityViolationException.class) != -1) { // the unique constraint was violated - It means the trace number was already stored by a payment or by other reverse. // If it was inserted by a payment then we must reverse it. // If was inserted by other reverse then just ignore it. return false; } else { throw e; } } } }); }