Java class org.apache.commons.lang.mutable.MutableDouble: example source code
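MutableDouble wraps a primitive double in a mutable holder, so a value stored in a collection can be updated in place instead of being replaced. The snippets below, collected from several open-source projects, use it mostly as an accumulator inside maps. As a warm-up, here is a minimal standalone sketch of the core API (not taken from any of the projects below):

import org.apache.commons.lang.mutable.MutableDouble;

public class MutableDoubleBasics {
  public static void main(String[] args) {
    MutableDouble d = new MutableDouble();   // starts at 0.0
    d.add(2.5);                              // now 2.5
    d.increment();                           // now 3.5
    d.setValue(10.0);                        // overwritten in place
    System.out.println(d.doubleValue());     // prints 10.0
  }
}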

Project: LCIndex-HBase-0.94.16    File: SchemaMetrics.java
/**
 * Used to accumulate store metrics across multiple regions in a region
 * server.  These metrics are not "persistent", i.e. we keep overriding them
 * on every update instead of incrementing, so we need to accumulate them in
 * a temporary map before pushing them to the global metric collection.
 * @param tmpMap a temporary map for accumulating store metrics
 * @param storeMetricType the store metric type to increment
 * @param val the value to add to the metric
 */
public void accumulateStoreMetric(final Map<String, MutableDouble> tmpMap,
    StoreMetricType storeMetricType, double val) {
  final String key = getStoreMetricName(storeMetricType);
  if (tmpMap.get(key) == null) {
    tmpMap.put(key, new MutableDouble(val));
  } else {
    tmpMap.get(key).add(val);
  }

  if (this == ALL_SCHEMA_METRICS) {
    // also compute the max value across all Stores on this server
    final String maxKey = getStoreMetricNameMax(storeMetricType);
    MutableDouble cur = tmpMap.get(maxKey);
    if (cur == null) {
      tmpMap.put(maxKey, new MutableDouble(val));
    } else if (cur.doubleValue() < val) {
      cur.setValue(val);
    }
  } else {
    ALL_SCHEMA_METRICS.accumulateStoreMetric(tmpMap, storeMetricType, val);
  }
}
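The method above follows a get-or-create accumulation pattern: look the key up, create a MutableDouble on a miss, add to the existing one on a hit. A condensed sketch of just that pattern (map and method names are illustrative):

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.mutable.MutableDouble;

static void accumulate(Map<String, MutableDouble> map, String key, double val) {
  MutableDouble cur = map.get(key);
  if (cur == null) {
    map.put(key, new MutableDouble(val));   // first value seen for this key
  } else {
    cur.add(val);                           // mutate in place; no re-put needed
  }
}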
Project: apex-malhar    File: AverageAlertingOperator.java
private void processTuple(KeyValPair<MerchantKey, Long> tuple)
{
  MerchantKey merchantKey = tuple.getKey();
  MutableDouble lastSma = lastSMAMap.get(tuple.getKey());
  long txValue = tuple.getValue();
  if (lastSma != null && txValue > lastSma.doubleValue()) {
    double lastSmaValue = lastSma.doubleValue();
    double change = txValue - lastSmaValue;
    if (change > threshold) { // generate an alert
      AverageAlertData data = getOutputData(merchantKey, txValue, change, lastSmaValue);
      alerts.add(data);
      //if (userGenerated) {   // if it's user generated, only then pass it to WebSocket
      if (merchantKey.merchantType == MerchantTransaction.MerchantType.BRICK_AND_MORTAR) {
        avgAlertNotificationPort.emit(getOutputData(data, String.format(brickMortarAlertMsg, txValue, change, lastSmaValue, merchantKey.merchantId, merchantKey.terminalId)));
      } else { // it's internet-based
        avgAlertNotificationPort.emit(getOutputData(data, String.format(internetAlertMsg, txValue, change, lastSmaValue, merchantKey.merchantId)));

      }
      //}
    }
  }
}
Project: apex-malhar    File: QuotientMap.java
/**
 * Generates tuples for each key and emits them. Only keys present in the
 * denominator map are iterated; a key present only in the numerator is
 * ignored (it would mean dividing by zero). Clears internal data.
 */
@Override
public void endWindow()
{
  HashMap<K, Double> tuples = new HashMap<K, Double>();
  for (Map.Entry<K, MutableDouble> e : denominators.entrySet()) {
    MutableDouble nval = numerators.get(e.getKey());
    if (nval == null) {
      tuples.put(e.getKey(), new Double(0.0));
    } else {
      tuples.put(e.getKey(), new Double((nval.doubleValue() / e.getValue()
          .doubleValue()) * mult_by));
    }
  }
  if (!tuples.isEmpty()) {
    quotient.emit(tuples);
  }
  numerators.clear();
  denominators.clear();
}
Project: apex-malhar    File: AverageKeyVal.java
/**
 * Emits the average for each key at end of window. Data is computed as tuples
 * are processed on the input port. Clears the internal data before returning.
 */
@Override
public void endWindow()
{
  for (Map.Entry<K, MutableDouble> e : sums.entrySet()) {
    K key = e.getKey();
    double d = e.getValue().doubleValue();
    if (doubleAverage.isConnected()) {
      doubleAverage.emit(new KeyValPair<K, Double>(key, d / counts.get(key).doubleValue()));
    }
    if (intAverage.isConnected()) {
      intAverage.emit(new KeyValPair<K, Integer>(key, (int)d));
    }
    if (longAverage.isConnected()) {
      longAverage.emit(new KeyValPair<K, Long>(key, (long)d));
    }
  }
  sums.clear();
  counts.clear();
}
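The operator keeps parallel sums and counts maps and divides only at window end. A minimal standalone sketch of the same average computation (the counts map type is an assumption; the operator's declaration is not shown in this excerpt):

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.mutable.MutableDouble;
import org.apache.commons.lang.mutable.MutableInt;

static Map<String, Double> averages(Map<String, MutableDouble> sums, Map<String, MutableInt> counts) {
  Map<String, Double> out = new HashMap<String, Double>();
  for (Map.Entry<String, MutableDouble> e : sums.entrySet()) {
    // average = accumulated sum / number of observations for the key
    out.put(e.getKey(), e.getValue().doubleValue() / counts.get(e.getKey()).intValue());
  }
  return out;
}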
Project: apex-malhar    File: SumKeyVal.java
/**
 * For each tuple (a key/value pair), adds the value to the running sum for its key.
 */
@Override
public void process(KeyValPair<K, V> tuple)
{
  K key = tuple.getKey();
  if (!doprocessKey(key)) {
    return;
  }
  SumEntry val = sums.get(key);
  if (val == null) {
    val = new SumEntry(new MutableDouble(tuple.getValue().doubleValue()), true);
  } else {
    val.sum.add(tuple.getValue().doubleValue());
    val.changed = true;
  }
  sums.put(cloneKey(key), val);
}
Project: apex-malhar    File: MultiWindowDimensionAggregation.java
@Override
public void endWindow()
{
  int totalWindowsOccupied = cacheOject.size();
  for (Map.Entry<String, Map<String, KeyValPair<MutableDouble, Integer>>> e : outputMap.entrySet()) {
    for (Map.Entry<String, KeyValPair<MutableDouble, Integer>> dimensionValObj : e.getValue().entrySet()) {
      Map<String, DimensionObject<String>> outputData = new HashMap<String, DimensionObject<String>>();
      KeyValPair<MutableDouble, Integer> keyVal = dimensionValObj.getValue();
      if (operationType == AggregateOperation.SUM) {
        outputData.put(e.getKey(), new DimensionObject<String>(keyVal.getKey(), dimensionValObj.getKey()));
      } else if (operationType == AggregateOperation.AVERAGE) {
        if (keyVal.getValue() != 0) {
          double totalCount = ((double)(totalWindowsOccupied * applicationWindowSize)) / 1000;
          outputData.put(e.getKey(), new DimensionObject<String>(new MutableDouble(keyVal.getKey().doubleValue() / totalCount), dimensionValObj.getKey()));
        }
      }
      if (!outputData.isEmpty()) {
        output.emit(outputData);
      }
    }
  }
  currentWindow = (currentWindow + 1) % windowSize;

}
Project: apex-malhar    File: DimensionAggregationUnifier.java
@Override
public void process(Map<String, DimensionObject<String>> tuple)
{
  for (Map.Entry<String, DimensionObject<String>> e : tuple.entrySet()) {
    Map<String, MutableDouble> obj = dataMap.get(e.getKey());
    DimensionObject<String> eObj = e.getValue();
    if (obj == null) {
      obj = new HashMap<String, MutableDouble>();
      obj.put(eObj.getVal(), new MutableDouble(eObj.getCount()));
      dataMap.put(e.getKey(), obj);
    } else {
      MutableDouble n = obj.get(eObj.getVal());
      if (n == null) {
        obj.put(eObj.getVal(), new MutableDouble(eObj.getCount()));
      } else {
        n.add(eObj.getCount());
      }
    }
  }

}
Project: apex-malhar    File: DimensionTimeBucketSumOperator.java
@Override
public void process(String timeBucket, String key, String field, Number value)
{
  String finalKey = timeBucket + "|" + key;
  Map<String, Number> m = dataMap.get(finalKey);
  if (value == null) {
    return;
  }
  if (m == null) {
    m = new HashMap<String, Number>();
    m.put(field, new MutableDouble(value));
    dataMap.put(finalKey, m);
  } else {
    Number n = m.get(field);
    if (n == null) {
      m.put(field, new MutableDouble(value));
    } else {
      ((MutableDouble)n).add(value);
    }
  }
}
Project: IRIndex    File: SchemaMetrics.java
Project: HBase-Research    File: SchemaMetrics.java
Project: hbase-0.94.8-qod    File: SchemaMetrics.java
Project: hindex    File: SchemaMetrics.java
These HBase variants all contain the same accumulateStoreMetric method, verbatim, as the LCIndex-HBase-0.94.16 snippet at the top of this page.
Project: apex-malhar    File: AverageAlertingOperator.java
@Override
public void process(KeyValPair<MerchantKey, Double> tuple)
{
  MutableDouble currentSma = currentSMAMap.get(tuple.getKey());
  if (currentSma == null) { // first sma for the given key
    double sma = tuple.getValue();
    currentSMAMap.put(tuple.getKey(), new MutableDouble(sma));
    //lastSMAMap.put(tuple.getKey(), new MutableDouble(sma));
  } else { // move the current SMA value to the last SMA Map
    //lastSMAMap.get(tuple.getKey()).setValue(currentSma.getValue());
    currentSma.setValue(tuple.getValue());  // update the current SMA value
  }
}
Project: apex-malhar    File: NumberSummation.java
protected Number convertToNumber(Object o)
{
  if (o == null) {
    return null;
  } else if (o instanceof MutableDouble || o instanceof MutableLong) {
    return (Number)o;
  } else if (o instanceof Double || o instanceof Float) {
    return new MutableDouble((Number)o);
  } else if (o instanceof Number) {
    return new MutableLong((Number)o);
  } else {
    return new MutableDouble(o.toString());
  }
}
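The dispatch above normalizes arbitrary inputs to a mutable Number. Illustrative calls (a sketch; assumes convertToNumber is visible in scope):

Number a = convertToNumber(3.14);   // Double  -> new MutableDouble(3.14)
Number b = convertToNumber(42);     // Integer -> new MutableLong(42)
Number c = convertToNumber("2.5");  // String  -> new MutableDouble("2.5"), parsed
Number d = convertToNumber(null);   // null    -> null

Note that the final branch throws NumberFormatException when toString() does not yield a parseable number.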
Project: apex-malhar    File: ChangeKeyVal.java
/**
 * Process each key, compute change or percent, and emit it.
 */
@Override
public void process(KeyValPair<K, V> tuple)
{
  K key = tuple.getKey();
  if (!doprocessKey(key)) {
    return;
  }
  MutableDouble bval = basemap.get(key);
  if (bval != null) { // Only process keys that are in the basemap
    double cval = tuple.getValue().doubleValue() - bval.doubleValue();
    change.emit(new KeyValPair<K, V>(cloneKey(key), getValue(cval)));
    percent.emit(new KeyValPair<K, Double>(cloneKey(key), (cval / bval.doubleValue()) * 100));
  }
}
Project: apex-malhar    File: DimensionOperatorUnifier.java
@Override
public void endWindow()
{
  Map<String, DimensionObject<String>> outputAggregationsObject;

  for (Entry<String, Map<String, Map<AggregateOperation, Number>>> keys : unifiedCache.entrySet()) {
    String key = keys.getKey();
    Map<String, Map<AggregateOperation, Number>> dimValues = keys.getValue();

    for (Entry<String, Map<AggregateOperation, Number>> dimValue : dimValues.entrySet()) {
      String dimValueName = dimValue.getKey();
      Map<AggregateOperation, Number> operations = dimValue.getValue();

      outputAggregationsObject = new HashMap<String, DimensionObject<String>>();

      for (Entry<AggregateOperation, Number> operation : operations.entrySet()) {
        AggregateOperation aggrOperationType = operation.getKey();
        Number aggr = operation.getValue();

        String outKey = key + "." + aggrOperationType.name();
        DimensionObject<String> outDimObj = new DimensionObject<String>(new MutableDouble(aggr), dimValueName);

        outputAggregationsObject.put(outKey, outDimObj);

      }
      aggregationsOutput.emit(outputAggregationsObject);
    }

  }
}
Project: apex-malhar    File: MultiWindowDimensionAggregation.java
@Override
public void setup(OperatorContext arg0)
{
  if (arg0 != null) {
    applicationWindowSize = arg0.getValue(OperatorContext.APPLICATION_WINDOW_COUNT);
  }
  if (cacheOject == null) {
    cacheOject = new HashMap<Integer, Map<String, Map<String, Number>>>(windowSize);
  }
  if (outputMap == null) {
    outputMap = new HashMap<String, Map<String, KeyValPair<MutableDouble, Integer>>>();
  }
  setUpPatternList();

}
Project: apex-malhar    File: DimensionAggregationUnifier.java
@Override
public void beginWindow(long windowId)
{
  dataMap = new HashMap<String, Map<String, MutableDouble>>();
}
Project: apex-malhar    File: DimensionAggregationUnifier.java
@Override
public void endWindow()
{
  for (Map.Entry<String, Map<String, MutableDouble>> e : dataMap.entrySet()) {
    for (Map.Entry<String, MutableDouble> dimensionValObj : e.getValue().entrySet()) {
      Map<String, DimensionObject<String>> outputData = new HashMap<String, DimensionObject<String>>();
      outputData.put(e.getKey(), new DimensionObject<String>(dimensionValObj.getValue(), dimensionValObj.getKey()));
      output.emit(outputData);
    }
  }
  dataMap.clear();
}
Project: apex-malhar    File: TopNUniqueSiteStatsTest.java
@SuppressWarnings({"rawtypes", "unchecked"})
public void testNodeProcessingSchema(TopNUnique oper)
{
  CollectorTestSink sortSink = new CollectorTestSink();
  oper.top.setSink(sortSink);
  oper.setN(3);

  oper.beginWindow(0);
  HashMap<String, DimensionObject<String>> input = new HashMap<String, DimensionObject<String>>();

  input.put("url", new DimensionObject<String>(new MutableDouble(10), "abc"));
  oper.data.process(input);

  input.clear();
  input.put("url", new DimensionObject<String>(new MutableDouble(1), "def"));
  input.put("url1", new DimensionObject<String>(new MutableDouble(1), "def"));
  oper.data.process(input);

  input.clear();
  input.put("url", new DimensionObject<String>(new MutableDouble(101), "ghi"));
  input.put("url1", new DimensionObject<String>(new MutableDouble(101), "ghi"));
  oper.data.process(input);

  input.clear();
  input.put("url", new DimensionObject<String>(new MutableDouble(50), "jkl"));
  oper.data.process(input);

  input.clear();
  input.put("url", new DimensionObject<String>(new MutableDouble(50), "jkl"));
  input.put("url3", new DimensionObject<String>(new MutableDouble(50), "jkl"));
  oper.data.process(input);
  oper.endWindow();

  Assert.assertEquals("number emitted tuples", 3, sortSink.collectedTuples.size());
  for (Object o : sortSink.collectedTuples) {
    log.debug(o.toString());
  }
  log.debug("Done testing round\n");
}
Project: apex-malhar    File: BasicCountersTest.java
@Test
public void testBasicCounters() throws InstantiationException, IllegalAccessException
{
  BasicCounters<MutableDouble> doubleBasicCounters = new BasicCounters<MutableDouble>(MutableDouble.class);
  MutableDouble counterA = doubleBasicCounters.findCounter(CounterKeys.A);

  counterA.increment();

  MutableDouble counterAInCounters = doubleBasicCounters.getCounter(CounterKeys.A);
  Assert.assertNotNull("null", doubleBasicCounters.getCounter(CounterKeys.A));
  Assert.assertTrue("equality", counterAInCounters.equals(counterA));
  Assert.assertEquals(counterA.doubleValue(), 1.0, 0);
}
Project: gerrit    File: ReviewerRecommender.java
private Map<Account.Id, MutableDouble> baseRankingForEmptyQuery(double baseWeight)
    throws OrmException, IOException, ConfigInvalidException {
  // Get the user's last 25 changes, check approvals
  try {
    List<ChangeData> result =
        queryProvider
            .get()
            .setLimit(25)
            .setRequestedFields(ChangeField.APPROVAL)
            .query(changeQueryBuilder.owner("self"));
    Map<Account.Id, MutableDouble> suggestions = new HashMap<>();
    for (ChangeData cd : result) {
      for (PatchSetApproval approval : cd.currentApprovals()) {
        Account.Id id = approval.getAccountId();
        if (suggestions.containsKey(id)) {
          suggestions.get(id).add(baseWeight);
        } else {
          suggestions.put(id, new MutableDouble(baseWeight));
        }
      }
    }
    return suggestions;
  } catch (QueryParseException e) {
    // Unhandled, because owner:self will never provoke a QueryParseException
    log.error("Exception while suggesting reviewers", e);
    return ImmutableMap.of();
  }
}
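On Java 8 and later, the containsKey/get/put sequence above can be collapsed into one call; a stylistic alternative, not the project's code:

suggestions.computeIfAbsent(id, k -> new MutableDouble()).add(baseWeight);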
Project: pm    File: HighLowSolver.java
private Map<Integer, Double> simpleHighPeaks(Double[] periodData, Double[] periodSmoothedCeiling, MutableDouble amountBelowSmoothingCeiling) {

    boolean noSmoothedConstraint = periodSmoothedCeiling.length == 0;

    Double max = -1d;
    SortedMap<Integer, Double> highPeaks = new TreeMap<Integer, Double>();
    highPeaks.put(0, 0d);
    for (int i = 1 ; i < periodData.length -1; i++) {

        if (noSmoothedConstraint || periodSmoothedCeiling[i] > periodData[i]) {
            amountBelowSmoothingCeiling.increment();
        }

        boolean isIdxAbovePrev = periodData[i-1] < periodData[i];
        boolean isIdxAboveSucc = periodData[i+1] < periodData[i];
        boolean isIdxAboveSmoothed = noSmoothedConstraint || periodSmoothedCeiling[i] <= periodData[i];
        if (isIdxAbovePrev && isIdxAboveSucc && isIdxAboveSmoothed) {
            highPeaks.put(i,periodData[i]);
        } else {
            highPeaks.put(i,0d);
        }

        if (periodData[i]  > max) max = periodData[i];
    }

    if (periodData[periodData.length-1] >= max && (noSmoothedConstraint || periodSmoothedCeiling[periodData.length-1] <= periodData[periodData.length-1])) {
        highPeaks.put(periodData.length-1, periodData[periodData.length-1]);
    } else {
        highPeaks.put(periodData.length-1, 0d);
    }

    amountBelowSmoothingCeiling.setValue(amountBelowSmoothingCeiling.doubleValue()/periodData.length);

    return highPeaks;
}
Project: pm    File: HighLowSolver.java
private Map<Integer, Double> simpleLowTroughs(Double[] pData, Double[] periodSmoothedFloor, MutableDouble amountAboveSmoothingFloor) {

        boolean noSmoothedConstraint = periodSmoothedFloor.length == 0;

        Double min = Double.MAX_VALUE;
        SortedMap<Integer, Double> lowTroughs = new TreeMap<Integer, Double>();
        lowTroughs.put(0,0d);
        for (int i = 1 ; i < pData.length-1; i++) {

            if (noSmoothedConstraint || periodSmoothedFloor[i] < pData[i]) {
                amountAboveSmoothingFloor.increment();
            }

            boolean isIdxBelowPrev = pData[i-1] > pData[i];
            boolean isIdxBelowSucc = pData[i+1] > pData[i];
            boolean isIdxBelowSmoothed = noSmoothedConstraint || periodSmoothedFloor[i] >= pData[i];
            if (isIdxBelowPrev && isIdxBelowSucc && isIdxBelowSmoothed) {
                lowTroughs.put(i,pData[i]);
            } else {
                lowTroughs.put(i,0d);
            }

            if (pData[i] < min) min = pData[i];
        }

        if (pData[pData.length-1] < min  && (noSmoothedConstraint || periodSmoothedFloor[pData.length-1] >= pData[pData.length-1])) {
            lowTroughs.put(pData.length-1,pData[pData.length-1]);
        } else {
            lowTroughs.put(pData.length-1,0d);
        }

        amountAboveSmoothingFloor.setValue(amountAboveSmoothingFloor.doubleValue()/pData.length);

        return lowTroughs;
    }
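Both methods use a MutableDouble as an out-parameter: the caller passes in a zeroed counter, the method increments it per matching index, then rescales it in place to a fraction of the period length. The same idiom in isolation (names are illustrative):

import org.apache.commons.lang.mutable.MutableDouble;

static void fractionPositive(double[] data, MutableDouble hits) {
  for (double d : data) {
    if (d > 0) {
      hits.increment();                         // count in the caller's holder
    }
  }
  hits.setValue(hits.doubleValue() / data.length);  // rescale in place
}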
Project: apex-malhar    File: SumCountMap.java
/**
 * Emits on all ports that are connected. Data is precomputed as tuples are
 * processed on the input port; endWindow just emits it for each key. Clears
 * the internal data before returning.
 */
@Override
public void endWindow()
{

  // Should allow users to send each key as a separate tuple to load balance
  // This is an aggregate node, so load balancing would most likely not be
  // needed

  HashMap<K, V> tuples = new HashMap<K, V>();
  HashMap<K, Integer> ctuples = new HashMap<K, Integer>();
  HashMap<K, Double> dtuples = new HashMap<K, Double>();
  HashMap<K, Integer> ituples = new HashMap<K, Integer>();
  HashMap<K, Float> ftuples = new HashMap<K, Float>();
  HashMap<K, Long> ltuples = new HashMap<K, Long>();
  HashMap<K, Short> stuples = new HashMap<K, Short>();

  for (Map.Entry<K, MutableDouble> e : sums.entrySet()) {
    K key = e.getKey();
    MutableDouble val = e.getValue();
    tuples.put(key, getValue(val.doubleValue()));
    dtuples.put(key, val.doubleValue());
    ituples.put(key, val.intValue());
    ftuples.put(key, val.floatValue());
    ltuples.put(key, val.longValue());
    stuples.put(key, val.shortValue());
    // ctuples.put(key, counts.get(e.getKey()).toInteger());
    MutableInt c = counts.get(e.getKey());
    if (c != null) {
      ctuples.put(key, c.toInteger());
    }
  }

  sum.emit(tuples);
  sumDouble.emit(dtuples);
  sumInteger.emit(ituples);
  sumLong.emit(ltuples);
  sumShort.emit(stuples);
  sumFloat.emit(ftuples);
  count.emit(ctuples);
  clearCache();
}
Project: apex-malhar    File: DimensionOperatorUnifierTest.java
@Test
@SuppressWarnings("unchecked")
public void testOperator()
{
  DimensionOperatorUnifier unifier = new DimensionOperatorUnifier();
  CollectorTestSink sink = new CollectorTestSink();

  unifier.aggregationsOutput.setSink(sink);

  unifier.beginWindow(1);

  Map<String, DimensionObject<String>> tuple1 = new HashMap<String, DimensionObject<String>>();

  tuple1.put("m|201402121900|0|65537|131074|bytes.AVERAGE", new DimensionObject<String>(new MutableDouble(75), "a"));
  tuple1.put("m|201402121900|0|65537|131074|bytes.COUNT", new DimensionObject<String>(new MutableDouble(3.0), "a"));
  tuple1.put("m|201402121900|0|65537|131074|bytes.SUM", new DimensionObject<String>(new MutableDouble(225), "a"));

  Map<String, DimensionObject<String>> tuple2 = new HashMap<String, DimensionObject<String>>();

  tuple2.put("m|201402121900|0|65537|131074|bytes.AVERAGE", new DimensionObject<String>(new MutableDouble(50), "a"));
  tuple2.put("m|201402121900|0|65537|131074|bytes.COUNT", new DimensionObject<String>(new MutableDouble(2.0), "a"));
  tuple2.put("m|201402121900|0|65537|131074|bytes.SUM", new DimensionObject<String>(new MutableDouble(100), "a"));

  Map<String, DimensionObject<String>> tuple3 = new HashMap<String, DimensionObject<String>>();

  tuple3.put("m|201402121900|0|65537|131074|bytes.AVERAGE", new DimensionObject<String>(new MutableDouble(50), "z"));
  tuple3.put("m|201402121900|0|65537|131074|bytes.COUNT", new DimensionObject<String>(new MutableDouble(2.0), "z"));
  tuple3.put("m|201402121900|0|65537|131074|bytes.SUM", new DimensionObject<String>(new MutableDouble(100), "z"));

  Map<String, DimensionObject<String>> tuple4 = new HashMap<String, DimensionObject<String>>();

  tuple4.put("m|201402121900|0|65537|131075|bytes.AVERAGE", new DimensionObject<String>(new MutableDouble(14290.5), "b"));
  tuple4.put("m|201402121900|0|65537|131075|bytes.COUNT", new DimensionObject<String>(new MutableDouble(2.0), "b"));
  tuple4.put("m|201402121900|0|65537|131075|bytes.SUM", new DimensionObject<String>(new MutableDouble(28581.0), "b"));

  Map<String, DimensionObject<String>> tuple5 = new HashMap<String, DimensionObject<String>>();

  tuple5.put("m|201402121900|0|65537|131076|bytes.AVERAGE", new DimensionObject<String>(new MutableDouble(290.75), "c"));
  tuple5.put("m|201402121900|0|65537|131076|bytes.COUNT", new DimensionObject<String>(new MutableDouble(10.0), "c"));
  tuple5.put("m|201402121900|0|65537|131076|bytes.SUM", new DimensionObject<String>(new MutableDouble(8581.0), "c"));

  unifier.process(tuple1);
  unifier.process(tuple2);
  unifier.process(tuple3);
  unifier.process(tuple4);
  unifier.process(tuple5);

  unifier.endWindow();

  @SuppressWarnings("unchecked")
  List<Map<String, DimensionObject<String>>> tuples = sink.collectedTuples;

  Assert.assertEquals("Tuple Count", 4, tuples.size());

  for (Map<String, DimensionObject<String>> map : tuples) {
    for (Entry<String, DimensionObject<String>> entry : map.entrySet()) {
      String key = entry.getKey();
      DimensionObject<String> dimObj = entry.getValue();
      if (key.equals("m|201402121900|0|65537|131074|bytes.AVERAGE") && dimObj.getVal().equals("a")) {
        Assert.assertEquals("average for key " + key + " and dimension key " + "a", new MutableDouble(65), dimObj.getCount());
      }

      if (key.equals("m|201402121900|0|65537|131074|bytes.SUM") && dimObj.getVal().equals("z")) {
        Assert.assertEquals("sum for key " + key + " and dimension key " + "z", new MutableDouble(100), dimObj.getCount());
      }

      if (key.equals("m|201402121900|0|65537|131076|bytes.COUNT") && dimObj.getVal().equals("c")) {
        Assert.assertEquals("count for key " + key + " and dimension key " + "c", new MutableDouble(10), dimObj.getCount());
      }
    }
  }
}
Project: apex-malhar    File: LogstreamTopNTest.java
@Test
@SuppressWarnings("unchecked")
public void testOperator()
{
  LogstreamTopN oper = new LogstreamTopN();
  LogstreamPropertyRegistry registry = new LogstreamPropertyRegistry();
  registry.bind(LogstreamUtil.LOG_TYPE, "apache");
  registry.bind(LogstreamUtil.FILTER, "default");
  oper.setRegistry(registry);
  oper.setN(5);

  CollectorTestSink mapSink = new CollectorTestSink();
  oper.top.setSink(mapSink);

  oper.beginWindow(0);
  Map<String, DimensionObject<String>> tuple1 = new HashMap<String, DimensionObject<String>>();
  DimensionObject<String> dimObja = new DimensionObject<String>(new MutableDouble(10), "a");
  tuple1.put("m|201402121900|0|65535|131075|val.COUNT", dimObja);
  oper.data.process(tuple1);

  DimensionObject<String> dimObjb = new DimensionObject<String>(new MutableDouble(1), "b");
  tuple1.put("m|201402121900|0|65535|131075|val.COUNT", dimObjb);
  oper.data.process(tuple1);

  DimensionObject<String> dimObjc = new DimensionObject<String>(new MutableDouble(5), "c");
  tuple1.put("m|201402121900|0|65535|131075|val.COUNT", dimObjc);
  oper.data.process(tuple1);

  DimensionObject<String> dimObjd = new DimensionObject<String>(new MutableDouble(2), "d");
  tuple1.put("m|201402121900|0|65535|131075|val.COUNT", dimObjd);
  oper.data.process(tuple1);

  DimensionObject<String> dimObje = new DimensionObject<String>(new MutableDouble(15), "e");
  tuple1.put("m|201402121900|0|65535|131075|val.COUNT", dimObje);
  oper.data.process(tuple1);

  DimensionObject<String> dimObjf = new DimensionObject<String>(new MutableDouble(4), "f");
  tuple1.put("m|201402121900|0|65535|131075|val.COUNT", dimObjf);
  oper.data.process(tuple1);

  oper.endWindow();

  @SuppressWarnings("unchecked")
  Map<String, List<DimensionObject<String>>> tuples = (Map<String, List<DimensionObject<String>>>)mapSink.collectedTuples.get(0);
  List<DimensionObject<String>> outList = tuples.get("m|201402121900|0|65535|131075|val.COUNT");

  List<DimensionObject<String>> expectedList = new ArrayList<DimensionObject<String>>();
  expectedList.add(dimObje);
  expectedList.add(dimObja);
  expectedList.add(dimObjc);
  expectedList.add(dimObjf);
  expectedList.add(dimObjd);

  Assert.assertEquals("Size", expectedList.size(), outList.size());
  Assert.assertEquals("compare list", expectedList, outList);
}
Project: apex-malhar    File: SumKeyVal.java
SumEntry(MutableDouble sum, boolean changed)
{
  this.sum = sum;
  this.changed = changed;
}
Project: apex-malhar    File: MultiWindowDimensionAggregation.java
@Override
public void beginWindow(long arg0)
{
  Map<String, Map<String, Number>> currentWindowMap = cacheOject.get(currentWindow);
  if (currentWindowMap == null) {
    currentWindowMap = new HashMap<String, Map<String, Number>>();
  } else {
    for (Map.Entry<String, Map<String, Number>> tupleEntry : currentWindowMap.entrySet()) {
      String tupleKey = tupleEntry.getKey();
      Map<String, Number> tupleValue = tupleEntry.getValue();
      int currentPattern = 0;
      for (Pattern pattern : patternList) {
        Matcher matcher = pattern.matcher(tupleKey);
        if (matcher.matches()) {
          String currentPatternString = dimensionArrayString.get(currentPattern);
          Map<String, KeyValPair<MutableDouble, Integer>> currentPatternMap = outputMap.get(currentPatternString);
          if (currentPatternMap != null) {
            StringBuilder builder = new StringBuilder(matcher.group(2));
            for (int i = 1; i < dimensionArray.get(currentPattern).length; i++) {
              builder.append("," + matcher.group(i + 2));
            }
            KeyValPair<MutableDouble, Integer> currentDimensionKeyValPair = currentPatternMap.get(builder.toString());
            if (currentDimensionKeyValPair != null) {
              currentDimensionKeyValPair.getKey().add(0 - tupleValue.get(dimensionKeyVal).doubleValue());
              currentDimensionKeyValPair.setValue(currentDimensionKeyValPair.getValue() - 1);
              if (currentDimensionKeyValPair.getKey().doubleValue() == 0.0) {
                currentPatternMap.remove(builder.toString());
              }
            }
          }
          break;
        }
        currentPattern++;
      }

    }
  }
  currentWindowMap.clear();
  if (patternList == null || patternList.isEmpty()) {
    setUpPatternList();
  }

}
Project: apex-malhar    File: DimensionObject.java
public DimensionObject(MutableDouble count, T s)
{
  this.count = count;
  val = s;
}
Project: apex-malhar    File: DimensionObject.java
public MutableDouble getCount()
{
  return count;
}
Project: apex-malhar    File: DimensionObject.java
public void setCount(MutableDouble count)
{
  this.count = count;
}
Project: apex-malhar    File: SimpleMovingAverageObject.java
public SimpleMovingAverageObject()
{
  sum = new MutableDouble(0);
  count = new MutableInt(0);
}
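The constructor pairs a MutableDouble running sum with a MutableInt count, the usual state for a moving average. A sketch of how such a holder typically accumulates and reports (method names here are illustrative, not necessarily the operator's):

import org.apache.commons.lang.mutable.MutableDouble;
import org.apache.commons.lang.mutable.MutableInt;

class AverageHolder {
  private final MutableDouble sum = new MutableDouble(0);
  private final MutableInt count = new MutableInt(0);

  void add(double value) {
    sum.add(value);       // accumulate in place
    count.increment();
  }

  double average() {
    return count.intValue() == 0 ? 0.0 : sum.doubleValue() / count.intValue();
  }
}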
Project: apex-malhar    File: MultiWindowDimensionAggregationTest.java
@SuppressWarnings({ "rawtypes", "unchecked" })
public void testNodeProcessingSchema(MultiWindowDimensionAggregation oper)
{

  oper.setWindowSize(3);
  List<int[]> dimensionArrayList = new ArrayList<int[]>();
  int[] dimensionArray = {0, 1};
  int[] dimensionArray_2 = {0};
  dimensionArrayList.add(dimensionArray);
  dimensionArrayList.add(dimensionArray_2);
  oper.setDimensionArray(dimensionArrayList);

  oper.setTimeBucket("m");
  oper.setDimensionKeyVal("0");

  oper.setOperationType(AggregateOperation.AVERAGE);
  oper.setup(null);
  CollectorTestSink sortSink = new CollectorTestSink();
  oper.output.setSink(sortSink);

  oper.beginWindow(0);
  Map<String, Map<String, Number>> data_0 = new HashMap<String, Map<String, Number>>();
  Map<String, Number> input_0 = new HashMap<String, Number>();

  input_0.put("0", new MutableDouble(9));
  input_0.put("1", new MutableDouble(9));
  input_0.put("2", new MutableDouble(9));
  data_0.put("m|20130823131512|0:abc|1:ff", input_0);
  data_0.put("m|20130823131512|0:abc", input_0);
  data_0.put("m|20130823131512|0:abc|1:ie", input_0);
  Map<String, Number> input_new = new HashMap<String, Number>();
  input_new.put("0", new MutableDouble(19));
  input_new.put("1", new MutableDouble(19));
  input_new.put("2", new MutableDouble(19));
  data_0.put("m|20130823131512|0:def|1:ie", input_new);
  oper.data.process(data_0);
  oper.endWindow();

  Map<String, Map<String, Number>> data_1 = new HashMap<String, Map<String, Number>>();
  Map<String, Number> input_1 = new HashMap<String, Number>();
  oper.beginWindow(1);

  input_1.put("0", new MutableDouble(9));
  input_1.put("1", new MutableDouble(9));
  input_1.put("2", new MutableDouble(9));
  data_1.put("m|20130823131513|0:def|1:ff", input_1);
  data_1.put("m|20130823131513|0:abc|1:ie", input_1);
  oper.data.process(data_1);
  oper.endWindow();

  Map<String, Map<String, Number>> data_2 = new HashMap<String, Map<String, Number>>();
  Map<String, Number> input_2 = new HashMap<String, Number>();
  oper.beginWindow(2);

  input_2.put("0", new MutableDouble(19));
  input_2.put("1", new MutableDouble(19));
  input_2.put("2", new MutableDouble(19));
  data_2.put("m|20130823131514|0:def|1:ff", input_2);
  data_2.put("m|20130823131514|0:abc|1:ie", input_2);
  oper.data.process(data_2);
  oper.endWindow();

  Map<String, Map<String, Number>> data_3 = new HashMap<String, Map<String, Number>>();
  Map<String, Number> input_3 = new HashMap<String, Number>();
  oper.beginWindow(3);
  input_3.put("0", new MutableDouble(19));
  input_3.put("1", new MutableDouble(19));
  input_3.put("2", new MutableDouble(19));
  data_3.put("m|20130823131514|0:def|1:ff", input_3);
  data_3.put("m|20130823131514|0:abc|1:ie", input_3);
  oper.data.process(data_3);
  oper.endWindow();

  Assert.assertEquals("number emitted tuples", 16, sortSink.collectedTuples.size());
  for (Object o : sortSink.collectedTuples) {
    logger.debug(o.toString());
  }
  logger.debug("Done testing round\n");
}
Project: apex-malhar    File: DimensionTimeBucketSumOperatorTest.java
@Test
public void testDimensionTimeBucket() throws InterruptedException
{
  DimensionTimeBucketSumOperator oper = new DimensionTimeBucketSumOperator();
  CollectorTestSink sortSink = new CollectorTestSink();
  oper.out.setSink(sortSink);

  oper.addDimensionKeyName("ipAddr");
  oper.addDimensionKeyName("url");
  oper.addDimensionKeyName("status");
  oper.addDimensionKeyName("agent");
  oper.addValueKeyName("bytes");
  Set<String> dimensionKey = new HashSet<String>();

  dimensionKey.add("ipAddr");
  dimensionKey.add("url");
  try {
    oper.addCombination(dimensionKey);
  } catch (NoSuchFieldException e) {
    //ignored
  }
  oper.setTimeBucketFlags(AbstractDimensionTimeBucketOperator.TIMEBUCKET_MINUTE);
  oper.setup(null);
  oper.beginWindow(0);
  oper.in.process(getMap("10.10.1.1", "/movies", "200", "FF", 20));
  oper.in.process(getMap("10.10.1.2", "/movies", "200", "FF", 20));
  oper.in.process(getMap("10.10.1.2", "/movies", "200", "FF", 20));
  oper.endWindow();
  Map<String, Map<String, Number>> outputMap = Maps.newHashMap();
  Map<String, Number> key1 = Maps.newHashMap();
  key1.put("1", new MutableDouble(40.0));
  key1.put("0", new MutableDouble(2.0));
  outputMap.put("m|197001010000|0:10.10.1.2|1:/movies", key1);
  Map<String, Number> key2 = Maps.newHashMap();
  key2.put("0", new MutableDouble(1.0));
  key2.put("1", new MutableDouble(20.0));
  outputMap.put("m|197001010000|0:10.10.1.1|1:/movies", key2);
  Assert.assertEquals("number emitted tuples", 1, sortSink.collectedTuples.size());
  for (Object o : sortSink.collectedTuples) {
    Assert.assertEquals("content of tuple ", outputMap, o);
    logger.debug(o.toString());
  }
  logger.debug("Done testing round\n");
}
Project: gerrit    File: ReviewerRecommender.java
private Map<Account.Id, MutableDouble> baseRankingForCandidateList(
    List<Account.Id> candidates, ProjectState projectState, double baseWeight)
    throws OrmException, IOException, ConfigInvalidException {
  // Get each reviewer's activity based on number of applied labels
  // (weighted 10d), number of comments (weighted 0.5d) and number of owned
  // changes (weighted 1d).
  Map<Account.Id, MutableDouble> reviewers = new LinkedHashMap<>();
  if (candidates.size() == 0) {
    return reviewers;
  }
  List<Predicate<ChangeData>> predicates = new ArrayList<>();
  for (Account.Id id : candidates) {
    try {
      Predicate<ChangeData> projectQuery = changeQueryBuilder.project(projectState.getName());

      // Get all labels for this project and create a compound OR query to
      // fetch all changes where users have applied one of these labels
      List<LabelType> labelTypes = projectState.getLabelTypes().getLabelTypes();
      List<Predicate<ChangeData>> labelPredicates = new ArrayList<>(labelTypes.size());
      for (LabelType type : labelTypes) {
        labelPredicates.add(changeQueryBuilder.label(type.getName() + ",user=" + id));
      }
      Predicate<ChangeData> reviewerQuery =
          Predicate.and(projectQuery, Predicate.or(labelPredicates));

      Predicate<ChangeData> ownerQuery =
          Predicate.and(projectQuery, changeQueryBuilder.owner(id.toString()));
      Predicate<ChangeData> commentedByQuery =
          Predicate.and(projectQuery, changeQueryBuilder.commentby(id.toString()));

      predicates.add(reviewerQuery);
      predicates.add(ownerQuery);
      predicates.add(commentedByQuery);
      reviewers.put(id, new MutableDouble());
    } catch (QueryParseException e) {
      // Unhandled: If an exception is thrown, we won't increase the
      // candidate's score
      log.error("Exception while suggesting reviewers", e);
    }
  }

  List<List<ChangeData>> result = queryProvider.get().setLimit(25).noFields().query(predicates);

  Iterator<List<ChangeData>> queryResultIterator = result.iterator();
  Iterator<Account.Id> reviewersIterator = reviewers.keySet().iterator();

  int i = 0;
  Account.Id currentId = null;
  while (queryResultIterator.hasNext()) {
    List<ChangeData> currentResult = queryResultIterator.next();
    if (i % WEIGHTS.length == 0) {
      currentId = reviewersIterator.next();
    }

    reviewers.get(currentId).add(WEIGHTS[i % WEIGHTS.length] * baseWeight * currentResult.size());
    i++;
  }
  return reviewers;
}
Project: gatk    File: CalculateContamination.java
@Override
public Object doWork() {
    final List<PileupSummary> sites = filterSites(PileupSummary.readFromFile(inputPileupSummariesTable));

    // use the matched normal to genotype (i.e. find hom alt sites) if available
    final List<PileupSummary> genotypingSites = matchedPileupSummariesTable == null ? sites :
            filterSites(PileupSummary.readFromFile(matchedPileupSummariesTable));

    // we partition the genome into contiguous allelic copy-number segments in order to infer the local minor
    // allele fraction at each site.  This is important because a minor allele fraction close to 1/2 (neutral)
    // allows hets and hom alts to be distinguished easily, while a low minor allele fraction makes it harder
    // to discriminate.  It is crucial to know which sites are true hom alts and which sites are hets with
    // loss of heterozygosity.  We do this for the genotyping sample because that is the sample from which
    // the hom alts are deduced.
    final List<List<PileupSummary>> genotypingSegments = findSegments(genotypingSites);


    List<PileupSummary> homAltGenotypingSites = new ArrayList<>();
    final MutableDouble genotypingContamination = new MutableDouble(INITIAL_CONTAMINATION_GUESS);

    for (int iteration = 0; iteration < MAX_ITERATIONS; iteration++) {
        List<List<PileupSummary>> homAltSitesBySegment = Arrays.asList(new ArrayList<>());
        final MutableDouble minorAlleleFractionThreshold = new MutableDouble(STRICT_LOH_MAF_THRESHOLD);
        while (homAltSitesBySegment.stream().mapToInt(List::size).sum() < DESIRED_MINIMUM_HOM_ALT_COUNT && minorAlleleFractionThreshold.doubleValue() > 0) {
            homAltSitesBySegment = genotypingSegments.stream()
                    .map(segment -> segmentHomAlts(segment, genotypingContamination.doubleValue(), minorAlleleFractionThreshold.doubleValue()))
                    .collect(Collectors.toList());
            minorAlleleFractionThreshold.subtract(MINOR_ALLELE_FRACTION_STEP_SIZE);
        }
        homAltGenotypingSites = homAltSitesBySegment.stream().flatMap(List::stream).collect(Collectors.toList());
        final double newGenotypingContamination = calculateContamination(homAltGenotypingSites, errorRate(genotypingSites)).getLeft();
        if (Math.abs(newGenotypingContamination - genotypingContamination.doubleValue()) < CONTAMINATION_CONVERGENCE_THRESHOLD) {
            break;
        }
        genotypingContamination.setValue(newGenotypingContamination);
    }

    final List<PileupSummary> homAltSites = subsetSites(sites, homAltGenotypingSites);
    final Pair<Double, Double> contaminationAndError = calculateContamination(homAltSites, errorRate(sites));
    final double contamination = contaminationAndError.getLeft();
    final double error = contaminationAndError.getRight();
    ContaminationRecord.writeToFile(Arrays.asList(new ContaminationRecord(ContaminationRecord.Level.WHOLE_BAM.toString(), contamination, error)), outputTable);

    return "SUCCESS";
}
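Here MutableDouble also works around a language rule: genotypingContamination and minorAlleleFractionThreshold are read inside stream lambdas, and Java lambdas may only capture effectively final locals, so the mutable box is updated while the reference stays final. A minimal sketch of that idiom (data and thresholds are made up):

import java.util.List;
import org.apache.commons.lang.mutable.MutableDouble;

static void shrinkThresholdUntilEnough(List<Double> data) {
  final MutableDouble threshold = new MutableDouble(0.5);
  while (threshold.doubleValue() > 0) {
    // the lambda reads the current value through the effectively final reference
    long kept = data.stream().filter(x -> x < threshold.doubleValue()).count();
    if (kept >= 2) {
      break;
    }
    threshold.subtract(0.1);   // mutate the box between passes
  }
}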
Project: pm    File: HighLowSolver.java
public Boolean higherHigh(Double[] periodData, Double[] periodSmoothedCeiling, Double alphaBalance, ArrayList<Double> regLine, MutableInt firstPeakIdx, MutableInt lastPeakIdx) {

        MutableDouble amountBelowSmoothingCeiling = new MutableDouble(0);

        //Double[] highPeaks = highPeaks(firstPeakIdx, lastPeakIdx, amountBelowSmoothingCeiling, periodData, periodSmoothedCeiling).values().toArray(new Double[0]);
        Double[] highPeaks = bestHighTangente(periodData, periodSmoothedCeiling, firstPeakIdx, lastPeakIdx, amountBelowSmoothingCeiling).values().toArray(new Double[0]);
        if (firstPeakIdx.intValue() == -1) return false;
        if (isNotBalanced(amountBelowSmoothingCeiling, firstPeakIdx.doubleValue(), lastPeakIdx.doubleValue(), periodData.length, alphaBalance)) return false;

        Double slopeCoef = dataSlope(firstPeakIdx.doubleValue(), lastPeakIdx.doubleValue(), highPeaks);
        if (goesDownOrFlat(slopeCoef)) return false;

        boolean isTrue = isDataBelowRegLine(periodData, firstPeakIdx.intValue(), slopeCoef, regLine);

        if (isTrue) {
            printRes("hh",periodData, periodSmoothedCeiling, highPeaks, slopeCoef, regLine);
        }

        return isTrue;

    }
Project: pm    File: HighLowSolver.java
public Boolean lowerHigh(Double[] periodData, Double[] periodSmoothedCeiling, Double alphaBalance, ArrayList<Double> regLine, MutableInt firstPeakIdx, MutableInt lastPeakIdx) {

        MutableDouble amountBelowSmoothingCeiling = new MutableDouble(0);

        Double[] highPeaks = bestHighTangente(periodData, periodSmoothedCeiling, firstPeakIdx, lastPeakIdx, amountBelowSmoothingCeiling).values().toArray(new Double[0]);
        if (firstPeakIdx.intValue() == -1) return false;
        if (isNotBalanced(amountBelowSmoothingCeiling, firstPeakIdx.doubleValue(), lastPeakIdx.doubleValue(), periodData.length, alphaBalance)) return false;

        Double slopeCoef = dataSlope(firstPeakIdx.doubleValue(), lastPeakIdx.doubleValue(), highPeaks);
        if (goesUpOrFlat(slopeCoef)) return false;

        boolean isTrue = isDataBelowRegLine(periodData, firstPeakIdx.intValue(), slopeCoef, regLine);

        if (isTrue) {
            printRes("lh",periodData, periodSmoothedCeiling, highPeaks, slopeCoef, regLine);
        }

        return isTrue;

    }