Java class com.codahale.metrics.Histogram: code examples

Project: dropwizard-influxdb-reporter    File: InfluxDbMeasurementReporter.java
@Override
public void report(final SortedMap<String, Gauge> gauges,
                   final SortedMap<String, Counter> counters,
                   final SortedMap<String, Histogram> histograms,
                   final SortedMap<String, Meter> meters,
                   final SortedMap<String, Timer> timers) {
  final long timestamp = clock.instant().toEpochMilli();

  final ImmutableList<InfluxDbMeasurement> influxDbMeasurements = ImmutableList.<InfluxDbMeasurement>builder()
    .addAll(transformer.fromGauges(gauges, timestamp))
    .addAll(transformer.fromCounters(counters, timestamp))
    .addAll(transformer.fromHistograms(histograms, timestamp))
    .addAll(transformer.fromMeters(meters, timestamp))
    .addAll(transformer.fromTimers(timers, timestamp))
    .build();

  sender.send(influxDbMeasurements);
}
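For orientation, a Dropwizard `ScheduledReporter` like the one above is handed sorted views of the registry's metrics on a fixed schedule. The InfluxDB reporter's own builder is not part of this excerpt, so here is a minimal, generic sketch of that wiring using the stock `ConsoleReporter`:

import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
import java.util.concurrent.TimeUnit;

public class ReporterWiringSketch {
    public static void main(String[] args) {
        MetricRegistry registry = new MetricRegistry();
        Histogram responseSizes = registry.histogram("response-sizes");
        responseSizes.update(512);

        // Each period the reporter invokes its report(gauges, counters, histograms,
        // meters, timers) override with the registry's current metrics.
        ConsoleReporter reporter = ConsoleReporter.forRegistry(registry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .build();
        reporter.start(10, TimeUnit.SECONDS);
    }
}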
Project: dropwizard-influxdb-reporter    File: DropwizardTransformer.java
/**
 * Build an {@link InfluxDbMeasurement} from a histogram.
 */
@VisibleForTesting InfluxDbMeasurement fromHistogram(final String metricName, final Histogram h, final long timestamp) {
  final Snapshot snapshot = h.getSnapshot();
  final DropwizardMeasurement measurement = parser.parse(metricName);

  final Map<String, String> tags = new HashMap<>(baseTags);
  tags.putAll(measurement.tags());

  return new InfluxDbMeasurement.Builder(measurement.name(), timestamp)
    .putTags(tags)
    .putField("count", snapshot.size())
    .putField("min", snapshot.getMin())
    .putField("max", snapshot.getMax())
    .putField("mean", snapshot.getMean())
    .putField("std-dev", snapshot.getStdDev())
    .putField("50-percentile", snapshot.getMedian())
    .putField("75-percentile", snapshot.get75thPercentile())
    .putField("95-percentile", snapshot.get95thPercentile())
    .putField("99-percentile", snapshot.get99thPercentile())
    .putField("999-percentile", snapshot.get999thPercentile())
    .putField("run-count", h.getCount())
    .build();
}
Project: oneops    File: MetricsElasticsearchModule.java
@Override
public void serialize(JsonHistogram jsonHistogram,
                      JsonGenerator json,
                      SerializerProvider provider) throws IOException {
    json.writeStartObject();
    json.writeStringField("name", jsonHistogram.name());
    json.writeObjectField(timestampFieldname, jsonHistogram.timestampAsDate());
    Histogram histogram = jsonHistogram.value();

    final Snapshot snapshot = histogram.getSnapshot();
    json.writeNumberField("count", histogram.getCount());
    json.writeNumberField("max", snapshot.getMax());
    json.writeNumberField("mean", snapshot.getMean());
    json.writeNumberField("min", snapshot.getMin());
    json.writeNumberField("p50", snapshot.getMedian());
    json.writeNumberField("p75", snapshot.get75thPercentile());
    json.writeNumberField("p95", snapshot.get95thPercentile());
    json.writeNumberField("p98", snapshot.get98thPercentile());
    json.writeNumberField("p99", snapshot.get99thPercentile());
    json.writeNumberField("p999", snapshot.get999thPercentile());

    json.writeNumberField("stddev", snapshot.getStdDev());
    addOneOpsMetadata(json);
    json.writeEndObject();
}
Project: Lagerta    File: HumanReadableCsvReporter.java
/** */
private void reportHistogram(String timestamp, String name, Histogram histogram) {
    final Snapshot snapshot = histogram.getSnapshot();

    report(timestamp,
        name,
        "count,max,mean,min,stddev,p50,p75,p90,p95,p98,p99,p999",
        "%d,%d,%f,%d,%f,%f,%f,%f,%f,%f,%f,%f",
        histogram.getCount(),
        snapshot.getMax(),
        snapshot.getMean(),
        snapshot.getMin(),
        snapshot.getStdDev(),
        snapshot.getMedian(),
        snapshot.get75thPercentile(),
        snapshot.getValue(0.9), // 90th percentile; Snapshot has no dedicated getter for it.
        snapshot.get95thPercentile(),
        snapshot.get98thPercentile(),
        snapshot.get99thPercentile(),
        snapshot.get999thPercentile());
}
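The `report(timestamp, name, header, format, values...)` helper called above is not included in this excerpt. A purely hypothetical sketch of what such a varargs CSV helper could look like (the `out` writer and the row layout are assumptions, not Lagerta's actual code):

import java.io.PrintWriter;

// Hypothetical stand-in for the report(...) helper used above.
final class CsvRowWriterSketch {
    private final PrintWriter out;

    CsvRowWriterSketch(PrintWriter out) { this.out = out; }

    void report(String timestamp, String name, String header, String format, Object... values) {
        Object[] row = new Object[values.length + 2];
        row[0] = timestamp;
        row[1] = name;
        System.arraycopy(values, 0, row, 2, values.length);
        out.println("timestamp,name," + header);       // header row
        out.printf("%s,%s," + format + "%n", row);     // data row
    }
}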
Project: semantic-metrics    File: MetricTypesExample.java
/**
 * A histogram measures the statistical distribution of values in a stream of data. In addition
 * to minimum, maximum, mean, etc., it also measures median, 75th, 90th, 95th, 98th, 99th, and
 * 99.9th percentiles. This histogram will measure the size of responses in bytes.
 */
private static void reportHistogram() {
    // Create or fetch (if it is already created) the metric.
    final Histogram histogram = registry.histogram(
        APP_PREFIX.tagged("what", "response-size").tagged("endpoint", "/v1/content"));

    // fetch the size of the response
    final long responseSize = 1000;
    // obviously this is gonna keep reporting 1000, but you know ;)

    histogram.update(responseSize);

    // That's it! The rest will be automatically done inside semantic metrics library. The
    // reported measurements will be kept in the registry.
    // Every time the reporter wants to report, different stats and aggregations (min, max,
    // median, 75th, 90th, 95th, 98th, 99th, and 99.9th percentiles) will be calculated and
    // datapoints will be created and reported.
}
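The example above uses semantic-metrics' tagged registry; with plain Dropwizard Metrics the equivalent (untagged) histogram is simply fetched from a `MetricRegistry` by name. A minimal sketch:

import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;

public class PlainHistogramSketch {
    public static void main(String[] args) {
        MetricRegistry registry = new MetricRegistry();
        // Create-or-fetch by name; repeated calls with the same name return the same instance.
        Histogram responseSizes = registry.histogram(
            MetricRegistry.name("endpoint", "v1.content", "response-size"));
        responseSizes.update(1000);
        System.out.println("p99 = " + responseSizes.getSnapshot().get99thPercentile());
    }
}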
Project: semantic-metrics    File: CustomPercentiles.java
public static void main(String[] args) throws IOException {
    FastForwardReporter f = FastForwardReporter
        .forRegistry(registry)
        .histogramQuantiles(0.62, 0.55, 0.99)
        .schedule(TimeUnit.SECONDS, 10)
        .build();
    f.start();

    Histogram h = registry.histogram(APP_PREFIX.tagged("what", "stuff"));

    for (int i = 0; i < 100; i++) {
        h.update(i);
    }

    System.out.println("Sending custom percentiles for histogram...");
    System.in.read();
    f.stop();
}
Project: spelk    File: ElasticsearchReporter.java
private void reportHistogram(JsonGenerator jsonGenerator, Entry<String, Histogram> entry, String timestampString) {
    try {
        writeStartMetric(entry.getKey(), jsonGenerator, timestampString);
        jsonGenerator.writeStartObject();
        final Histogram histogram = entry.getValue();
        final Snapshot snapshot = histogram.getSnapshot();
        jsonGenerator.writeNumberField("count", histogram.getCount());
        jsonGenerator.writeNumberField("min", convertDuration(snapshot.getMin()));
        jsonGenerator.writeNumberField("max", convertDuration(snapshot.getMax()));
        jsonGenerator.writeNumberField("mean", convertDuration(snapshot.getMean()));
        jsonGenerator.writeNumberField("stddev", convertDuration(snapshot.getStdDev()));
        jsonGenerator.writeNumberField("median", convertDuration(snapshot.getMedian()));
        jsonGenerator.writeNumberField("75th percentile", convertDuration(snapshot.get75thPercentile()));
        jsonGenerator.writeNumberField("95th percentile", convertDuration(snapshot.get95thPercentile()));
        jsonGenerator.writeNumberField("98th percentile", convertDuration(snapshot.get98thPercentile()));
        jsonGenerator.writeNumberField("99th percentile", convertDuration(snapshot.get99thPercentile()));
        jsonGenerator.writeNumberField("999th percentile", convertDuration(snapshot.get999thPercentile()));

        jsonGenerator.writeEndObject();
        writeEndMetric(jsonGenerator);

    } catch (IOException ioe) {
        LOGGER.error("Exception writing metrics to Elasticsearch index: " + ioe.toString());
    }

}
Project: codahale-aggregated-metrics-cloudwatch-reporter    File: CloudWatchReporter.java
/**
 * The {@link Snapshot} values of a {@link Histogram} are reported as a raw {@link StatisticSet}. In other words,
 * the conversion using the duration factor does NOT apply.
 * <p>
 * Please note that the reported values are submitted only if they contain some data (greater than zero) in order to:
 * <p>
 * 1. save some money
 * 2. prevent a com.amazonaws.services.cloudwatch.model.InvalidParameterValueException if an empty {@link Snapshot}
 * is submitted
 * <p>
 * If {@link Builder#withZeroValuesSubmission()} is {@code true}, then all values will be submitted.
 *
 * @see Histogram#getSnapshot
 */
private void processHistogram(final String metricName, final Histogram histogram, final List<MetricDatum> metricData) {
    final Snapshot snapshot = histogram.getSnapshot();

    if (builder.withZeroValuesSubmission || snapshot.size() > 0) {
        for (final Percentile percentile : builder.percentiles) {
            final double value = snapshot.getValue(percentile.getQuantile());
            stageMetricDatum(true, metricName, value, StandardUnit.None, percentile.getDesc(), metricData);
        }
    }

    // prevent empty snapshot from causing InvalidParameterValueException
    if (snapshot.size() > 0) {
        stageMetricDatum(builder.withArithmeticMean, metricName, snapshot.getMean(), StandardUnit.None, DIMENSION_SNAPSHOT_MEAN, metricData);
        stageMetricDatum(builder.withStdDev, metricName, snapshot.getStdDev(), StandardUnit.None, DIMENSION_SNAPSHOT_STD_DEV, metricData);
        stageMetricDatumWithRawSnapshot(builder.withStatisticSet, metricName, snapshot, StandardUnit.None, metricData);
    }
}
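`stageMetricDatumWithRawSnapshot(...)` is not shown here; conceptually it packs a non-empty snapshot into an AWS `StatisticSet`, roughly as sketched below (the field mapping is an assumption based on the javadoc above, not the reporter's verbatim code):

import com.amazonaws.services.cloudwatch.model.MetricDatum;
import com.amazonaws.services.cloudwatch.model.StandardUnit;
import com.amazonaws.services.cloudwatch.model.StatisticSet;
import com.codahale.metrics.Snapshot;
import java.util.Arrays;

final class RawSnapshotSketch {
    // Sketch only: pack a non-empty Snapshot into a raw StatisticSet datum.
    static MetricDatum rawSnapshotDatum(String metricName, Snapshot snapshot) {
        StatisticSet statisticSet = new StatisticSet()
            .withSampleCount((double) snapshot.size())
            .withSum((double) Arrays.stream(snapshot.getValues()).sum())
            .withMinimum((double) snapshot.getMin())
            .withMaximum((double) snapshot.getMax());
        return new MetricDatum()
            .withMetricName(metricName)
            .withUnit(StandardUnit.None)
            .withStatisticValues(statisticSet);
    }
}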
Project: beyondj    File: JettyServletMetricsActor.java
private void updateMetrics(MetricsMessageCollection messageCollection) {
    List<MetricsMessage> messages = messageCollection.getMessages();
    for (MetricsMessage message : messages) {
        List<KeyValuePair> pairs = message.getHeaders();
        for (KeyValuePair pair : pairs) {
            String key = pair.getKey();
            for (KeyValuePair keyValuePair : message.getHeaders()) {
                Histogram histogram = metricsRegistry.histogram(HISTOGRAM + key);
                if (keyValuePair.getValueType() == ValueType.DECIMAL && keyValuePair.getDecimalValue() != null) {
                    histogram.update(Long.valueOf(keyValuePair.getDecimalValue().intValue()));
                } else if (keyValuePair.getValueType() == ValueType.LONG && keyValuePair.getLongValue() != null) {
                    histogram.update(keyValuePair.getLongValue());
                } else if (keyValuePair.getValueType() == ValueType.INTEGER && keyValuePair.getIntValue() != null) {
                    histogram.update(keyValuePair.getIntValue());
                }
            }
        }
    }
}
Project: riposte    File: SignalFxAwareCodahaleMetricsCollectorTest.java
@DataProvider(value = {
    "null",
    "0",
    "1",
    "2"
}, splitBy = "\\|")
@Test
public void getNamedHistogram_with_iterable_dimensions_creates_dimensioned_histogram_using_sfx_mechanisms(
    Integer numDimensions
) {
    // given
    String histogramName = UUID.randomUUID().toString();
    List<Pair<String, String>> iterableDims = generateIterableDimensions(numDimensions);

    // when
    Histogram result = sfxImpl.getNamedHistogram(histogramName, iterableDims);

    // then
    verifyMetricCreation(histogramBuilderMock, histogramTaggerMock, histogramName, iterableDims, histogramMock, result);
}
Project: riposte    File: SignalFxEndpointMetricsHandlerTest.java
@DataProvider(value = {
    "42     |   DAYS",
    "123    |   SECONDS",
    "999    |   MILLISECONDS",
    "3      |   HOURS"
}, splitBy = "\\|")
@Test
public void RollingWindowTimerBuilder_newMetric_creates_new_timer_with_SlidingTimeWindowReservoir_with_expected_values(
    long amount, TimeUnit timeUnit
) {
    // given
    RollingWindowTimerBuilder rwtb = new RollingWindowTimerBuilder(amount, timeUnit);

    // when
    Timer timer = rwtb.newMetric();

    // then
    Histogram histogram = (Histogram) getInternalState(timer, "histogram");
    Reservoir reservoir = (Reservoir) getInternalState(histogram, "reservoir");
    assertThat(reservoir).isInstanceOf(SlidingTimeWindowReservoir.class);
    // The expected value here comes from logic in the SlidingTimeWindowReservoir constructor.
    assertThat(getInternalState(reservoir, "window")).isEqualTo(timeUnit.toNanos(amount) * 256);
}
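The `* 256` factor asserted above comes from the `SlidingTimeWindowReservoir` constructor, which multiplies the window by a collision buffer so that several values recorded in the same nanosecond get distinct keys. A small sketch of that calculation (paraphrased from Dropwizard Metrics, not verbatim):

import java.util.concurrent.TimeUnit;

public class WindowSizeSketch {
    // Paraphrased: the reservoir stores windowUnit.toNanos(window) * COLLISION_BUFFER,
    // where COLLISION_BUFFER is 256 in the Dropwizard Metrics source.
    private static final int COLLISION_BUFFER = 256;

    static long expectedWindow(long amount, TimeUnit unit) {
        return unit.toNanos(amount) * COLLISION_BUFFER;
    }

    public static void main(String[] args) {
        System.out.println(expectedWindow(42, TimeUnit.DAYS)); // matches the test's expected value
    }
}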
Project: riposte    File: SignalFxEndpointMetricsHandlerTest.java
@DataProvider(value = {
    "42     |   DAYS",
    "123    |   SECONDS",
    "999    |   MILLISECONDS",
    "3      |   HOURS"
}, splitBy = "\\|")
@Test
public void RollingWindowHistogramBuilder_newMetric_creates_new_histogram_with_SlidingTimeWindowReservoir_with_expected_values(
    long amount, TimeUnit timeUnit
) {
    // given
    RollingWindowHistogramBuilder rwhb = new RollingWindowHistogramBuilder(amount, timeUnit);

    // when
    Histogram histogram = rwhb.newMetric();

    // then
    Reservoir reservoir = (Reservoir) getInternalState(histogram, "reservoir");
    assertThat(reservoir).isInstanceOf(SlidingTimeWindowReservoir.class);
    // The expected value here comes from logic in the SlidingTimeWindowReservoir constructor.
    assertThat(getInternalState(reservoir, "window")).isEqualTo(timeUnit.toNanos(amount) * 256);
}
Project: riposte    File: SignalFxEndpointMetricsHandlerTest.java
@DataProvider(value = {
    "true   |   true",
    "false  |   false"
}, splitBy = "\\|")
@Test
public void RollingWindowHistogramBuilder_isInstance_works_as_expected(boolean useHistogram, boolean expectedResult) {
    // given
    Metric metric = (useHistogram) ? mock(Histogram.class) : mock(Gauge.class);
    RollingWindowHistogramBuilder rwhb = new RollingWindowHistogramBuilder(42, TimeUnit.DAYS);

    // when
    boolean result = rwhb.isInstance(metric);

    // then
    assertThat(result).isEqualTo(expectedResult);
}
Project: mongoose-base    File: CustomMetricRegistry.java
private void notifyListenerOfAddedMetric(
    final MetricRegistryListener listener, final Metric metric, final String name
) {
    if(metric instanceof Gauge) {
        listener.onGaugeAdded(name, (Gauge<?>) metric);
    } else if(metric instanceof Counter) {
        listener.onCounterAdded(name, (Counter) metric);
    } else if(metric instanceof Histogram) {
        listener.onHistogramAdded(name, (Histogram) metric);
    } else if(metric instanceof Meter) {
        listener.onMeterAdded(name, (Meter) metric);
    } else if(metric instanceof Timer) {
        listener.onTimerAdded(name, (Timer) metric);
    } else {
        throw new IllegalArgumentException("Unsupported metric type: " + metric.getClass());
    }
}
Project: mongoose-base    File: CustomMetricRegistry.java
private void notifyListenerOfRemovedMetric(
    final String name, final Metric metric, final MetricRegistryListener listener
) {
    if(metric instanceof Gauge) {
        listener.onGaugeRemoved(name);
    } else if(metric instanceof Counter) {
        listener.onCounterRemoved(name);
    } else if(metric instanceof Histogram) {
        listener.onHistogramRemoved(name);
    } else if(metric instanceof Meter) {
        listener.onMeterRemoved(name);
    } else if(metric instanceof Timer) {
        listener.onTimerRemoved(name);
    } else {
        throw new IllegalArgumentException("Unsupported metric type: " + metric.getClass());
    }
}
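These helpers mirror the callbacks that Dropwizard's own `MetricRegistry` fires at registered listeners; a minimal sketch of a listener registered through the stock `addListener` API (not specific to CustomMetricRegistry):

import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.MetricRegistryListener;

public class ListenerSketch {
    public static void main(String[] args) {
        MetricRegistry registry = new MetricRegistry();
        registry.addListener(new MetricRegistryListener.Base() {
            @Override
            public void onHistogramAdded(String name, Histogram histogram) {
                System.out.println("histogram registered: " + name);
            }
        });
        registry.histogram("request-sizes"); // fires onHistogramAdded on the listener
    }
}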
Project: metrics-circonus    File: CirconusReporterTest.java
@Test
public void reportsWithCallback() throws Exception {
  List<String> dynamicTags = new ArrayList<String>();
  dynamicTags.add("status:active");
  dynamicTags.add("speed:29");

  when(callback.getTags()).thenReturn(dynamicTags);

  final Counter counter = mock(Counter.class);
  when(counter.getCount()).thenReturn(100L);

  reporterWithCallback.report(this.<Gauge>map(),
          this.<Counter>map("counter", counter),
          this.<Histogram>map(),
          this.<Meter>map(),
          this.<Timer>map());

  verify(request).addGauge(new CirconusGauge("counter", 100L, timestamp, HOST, dynamicTags));
}
Project: metrics-circonus    File: CirconusReporterTest.java
@Test
public void reportsCounters() throws Exception {
  final Counter counter = mock(Counter.class);
  when(counter.getCount()).thenReturn(100L);

  reporter.report(this.<Gauge>map(),
                  this.<Counter>map("counter", counter),
                  this.<Histogram>map(),
                  this.<Meter>map(),
                  this.<Timer>map());

  final InOrder inOrder = inOrder(transport, request);
  inOrder.verify(transport).prepare();
  inOrder.verify(request).addGauge(new CirconusGauge("counter", 100L, timestamp, HOST, tags));
  inOrder.verify(request).send();

  verify(transport).prepare();
  verify(request).send();
  verifyNoMoreInteractions(transport, request);
}
Project: monitoring-center    File: MetricCollectorImpl.java
private String getPostfixForMetric(Metric metric) {
    Preconditions.checkNotNull(metric);

    Class<? extends Metric> metricClass = metric.getClass();
    if (Counter.class.isAssignableFrom(metricClass)) {
        return COUNTER_POSTFIX;
    } else if (Gauge.class.isAssignableFrom(metricClass)) {
        return GAUGE_POSTFIX;
    } else if (Timer.class.isAssignableFrom(metricClass)) {
        return TIMER_POSTFIX;
    } else if (Meter.class.isAssignableFrom(metricClass)) {
        return METER_POSTFIX;
    } else if (Histogram.class.isAssignableFrom(metricClass)) {
        return HISTOGRAM_POSTFIX;
    } else {
        return null;
    }
}
Project: monitoring-center    File: GraphiteMetricFormatter.java
public String format(SortedMap<String, Metric> metricsByNames) {
    Preconditions.checkNotNull(metricsByNames);

    final long timestamp = System.nanoTime() / 1000;

    StringBuilder outputBuilder = new StringBuilder();
    for (Map.Entry<String, Metric> entry : metricsByNames.entrySet()) {
        String metricOutput = null;
        if (Counter.class.isInstance(entry.getValue())) {
            metricOutput = formatCounter(entry.getKey(), Counter.class.cast(entry.getValue()), timestamp);
        } else if (Gauge.class.isInstance(entry.getValue())) {
            metricOutput = formatGauge(entry.getKey(), Gauge.class.cast(entry.getValue()), timestamp);
        } else if (Timer.class.isInstance(entry.getValue())) {
            metricOutput = formatTimer(entry.getKey(), Timer.class.cast(entry.getValue()), timestamp);
        } else if (Meter.class.isInstance(entry.getValue())) {
            metricOutput = formatMetered(entry.getKey(), Meter.class.cast(entry.getValue()), timestamp);
        } else if (Histogram.class.isInstance(entry.getValue())) {
            metricOutput = formatHistogram(entry.getKey(), Histogram.class.cast(entry.getValue()), timestamp);
        }

        if (metricOutput != null) {
            outputBuilder.append(metricOutput);
        }
    }
    return outputBuilder.toString();
}
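The per-type formatters such as `formatHistogram(...)` are not included in this excerpt; a hypothetical sketch of what a Graphite plaintext histogram formatter typically emits (one `<name> <value> <timestamp>` line per sub-metric; the helper name and sub-metric names are assumptions):

import com.codahale.metrics.Histogram;
import com.codahale.metrics.Snapshot;

final class GraphiteHistogramFormatSketch {
    // Hypothetical formatter: one plaintext line per sub-metric.
    static String formatHistogram(String name, Histogram histogram, long timestamp) {
        Snapshot snapshot = histogram.getSnapshot();
        StringBuilder out = new StringBuilder();
        out.append(name).append(".count ").append(histogram.getCount()).append(' ').append(timestamp).append('\n');
        out.append(name).append(".mean ").append(snapshot.getMean()).append(' ').append(timestamp).append('\n');
        out.append(name).append(".p99 ").append(snapshot.get99thPercentile()).append(' ').append(timestamp).append('\n');
        return out.toString();
    }
}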
Project: JInsight    File: ApptuitReporter.java
@Override
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters,
    SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters,
    SortedMap<String, Timer> timers) {

  DataPointCollector collector = new DataPointCollector(System.currentTimeMillis() / 1000);

  buildReportTimer.time(() -> {
    debug("################");

    debug(">>>>>>>> Guages <<<<<<<<<");
    gauges.forEach(collector::collectGauge);
    debug(">>>>>>>> Counters <<<<<<<<<");
    counters.forEach(collector::collectCounter);
    debug(">>>>>>>> Histograms <<<<<<<<<");
    histograms.forEach(collector::collectHistogram);
    debug(">>>>>>>> Meters <<<<<<<<<");
    meters.forEach(collector::collectMeter);
    debug(">>>>>>>> Timers <<<<<<<<<");
    timers.forEach(collector::collectTimer);

    debug("################");
  });

  sendReportTimer.time(() -> {
    Collection<DataPoint> dataPoints = collector.dataPoints;
    dataPointsReporter.put(dataPoints);
    //dataPoints.forEach(System.out::println);
  });
}
Project: dropwizard-influxdb-reporter    File: DropwizardTransformerTest.java
@Test
public void testFromHistogram() {
  final Set<String> fieldKeys = ImmutableSet.of(
    "count",
    "min",
    "max",
    "mean",
    "std-dev",
    "50-percentile",
    "75-percentile",
    "95-percentile",
    "99-percentile",
    "999-percentile",
    "run-count"
  );

  final DropwizardMeasurementParser parser = mock(DropwizardMeasurementParser.class);
  final DropwizardTransformer transformer = transformerWithParser(parser, true);

  when(parser.parse("some.metric.name")).thenReturn(
    DropwizardMeasurement.create("Measurement", MEASUREMENT_TAGS, Optional.empty())
  );

  final Histogram histogram = new Histogram(new ExponentiallyDecayingReservoir());
  histogram.update(15L);
  histogram.update(70L);
  histogram.update(100L);

  final InfluxDbMeasurement measurement = transformer.fromHistogram("some.metric.name", histogram, 90210L);
  assertEquals("should parse name from full metric key", "Measurement", measurement.name());
  assertEquals("should add global and measurement tags", ALL_TAGS, measurement.tags());
  assertEquals("should timestamp measurement", 90210L, measurement.timestamp());
  assertEquals("should add all histogram fields", fieldKeys, measurement.fields().keySet());
}
Project: Lagerta    File: AdvancedReporter.java
@Override public void report(
    SortedMap<String, Gauge> gauges,
    SortedMap<String, Counter> counters,
    SortedMap<String, Histogram> histograms,
    SortedMap<String, Meter> meters,
    SortedMap<String, Timer> timers
) {
    if (firstReportTime < 0) {
        firstReportTime = System.currentTimeMillis();
    }
    if (System.currentTimeMillis() - firstReportTime < warmupDuration) {
        return;
    }
    reportStatistics(gauges, counters, histograms, meters, timers);
}
Project: Lagerta    File: AdvancedReporter.java
protected abstract void reportStatistics(
    SortedMap<String, Gauge> gauges,
    SortedMap<String, Counter> counters,
    SortedMap<String, Histogram> histograms,
    SortedMap<String, Meter> meters,
    SortedMap<String, Timer> timers
);
Project: hadoop    File: SLSWebApp.java
public SLSWebApp(ResourceSchedulerWrapper wrapper, int metricsAddressPort) {
  this.wrapper = wrapper;
  metrics = wrapper.getMetrics();
  handleOperTimecostHistogramMap =
          new HashMap<SchedulerEventType, Histogram>();
  queueAllocatedMemoryCounterMap = new HashMap<String, Counter>();
  queueAllocatedVCoresCounterMap = new HashMap<String, Counter>();
  schedulerMetrics = wrapper.getSchedulerMetrics();
  port = metricsAddressPort;
}
Project: hadoop    File: ResourceSchedulerWrapper.java
@Override
public void run() {
  samplerLock.lock();
  try {
    for (Histogram histogram : schedulerHistogramList) {
      Timer timer = histogramTimerMap.get(histogram);
      histogram.update((int) timer.getSnapshot().getMean());
    }
  } finally {
    samplerLock.unlock();
  }
}
Project: dockerized-microservices    File: GrpcServerInterceptor.java
/**
 * Intercepts all gRPC calls, recording per-method request timing, response sizes, and error counts.
 * @param serverCall the call being intercepted
 * @param metadata the request headers
 * @param serverCallHandler the next handler in the interception chain
 * @param <ReqT> request message type
 * @param <RespT> response message type
 * @return a listener for the intercepted call
 */
@Override
public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(final ServerCall<ReqT, RespT> serverCall, Metadata metadata, ServerCallHandler<ReqT, RespT> serverCallHandler) {

    Timer.Context timer = metricRegistry.timer(metricName(M_REQ_TIME, serverCall.getMethodDescriptor().getFullMethodName().replace("/", "."))).time();
    Histogram histogram = metricRegistry.histogram(metricName(M_RESPONSE_SIZE, serverCall.getMethodDescriptor().getFullMethodName().replace("/", ".")));

    SimpleForwardingServerCall<ReqT, RespT> nextCall = new SimpleForwardingServerCall<ReqT, RespT>(serverCall) {
        @Override
        public void close(Status status, Metadata trailers) {
            Meter errorMeter = metricRegistry.meter(metricName(ERROR_METRIC, getMethodDescriptor().getFullMethodName().replace("/", ".")));
            if (!status.isOk()) {
                errorMeter.mark();
                log.error("An error occured with {}", serverCall.getMethodDescriptor());
            }

            timer.stop();

            super.close(status, trailers);
        }

        @Override
        public void sendMessage(RespT message) {
            super.sendMessage(message);

            if (message instanceof MessageLite) {
                histogram.update(((MessageLite) message).getSerializedSize());
                log.info("Message sent size = {}", ((MessageLite) message).getSerializedSize());
            }
        }

    };
    return serverCallHandler.startCall(nextCall, metadata);
}
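To take effect, an interceptor like this has to be attached when the gRPC server is assembled; a minimal sketch using the standard `ServerInterceptors` API (the service class and the interceptor's constructor arguments are placeholders):

import io.grpc.Server;
import io.grpc.ServerBuilder;
import io.grpc.ServerInterceptors;
import java.io.IOException;

public class GrpcMetricsWiringSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        // MyServiceImpl and the interceptor's no-arg construction are placeholders;
        // the real project may require a MetricRegistry or other dependencies.
        Server server = ServerBuilder.forPort(8080)
            .addService(ServerInterceptors.intercept(new MyServiceImpl(), new GrpcServerInterceptor()))
            .build()
            .start();
        server.awaitTermination();
    }
}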
Project: athena    File: MetricsListCommand.java
/**
 * Creates a json object for a certain metric.
 *
 * @param metric metric object
 * @return json object
 */
private ObjectNode json(Metric metric) {
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode objectNode = mapper.createObjectNode();
    ObjectNode dataNode = mapper.createObjectNode();

    if (metric instanceof Counter) {
        dataNode.put(COUNTER, ((Counter) metric).getCount());
        objectNode.set(COUNTER, dataNode);
    } else if (metric instanceof Gauge) {
        objectNode.put(VALUE, ((Gauge) metric).getValue().toString());
        objectNode.set(GAUGE, dataNode);
    } else if (metric instanceof Meter) {
        dataNode.put(COUNTER, ((Meter) metric).getCount());
        dataNode.put(MEAN_RATE, ((Meter) metric).getMeanRate());
        dataNode.put(ONE_MIN_RATE, ((Meter) metric).getOneMinuteRate());
        dataNode.put(FIVE_MIN_RATE, ((Meter) metric).getFiveMinuteRate());
        dataNode.put(FIFT_MIN_RATE, ((Meter) metric).getFifteenMinuteRate());
        objectNode.set(METER, dataNode);
    } else if (metric instanceof Histogram) {
        dataNode.put(COUNTER, ((Histogram) metric).getCount());
        dataNode.put(MEAN, ((Histogram) metric).getSnapshot().getMean());
        dataNode.put(MIN, ((Histogram) metric).getSnapshot().getMin());
        dataNode.put(MAX, ((Histogram) metric).getSnapshot().getMax());
        dataNode.put(STDDEV, ((Histogram) metric).getSnapshot().getStdDev());
        objectNode.set(HISTOGRAM, dataNode);
    } else if (metric instanceof Timer) {
        dataNode.put(COUNTER, ((Timer) metric).getCount());
        dataNode.put(MEAN_RATE, ((Timer) metric).getMeanRate());
        dataNode.put(ONE_MIN_RATE, ((Timer) metric).getOneMinuteRate());
        dataNode.put(FIVE_MIN_RATE, ((Timer) metric).getFiveMinuteRate());
        dataNode.put(FIFT_MIN_RATE, ((Timer) metric).getFifteenMinuteRate());
        dataNode.put(MEAN, nanoToMs(((Timer) metric).getSnapshot().getMean()));
        dataNode.put(MIN, nanoToMs(((Timer) metric).getSnapshot().getMin()));
        dataNode.put(MAX, nanoToMs(((Timer) metric).getSnapshot().getMax()));
        dataNode.put(STDDEV, nanoToMs(((Timer) metric).getSnapshot().getStdDev()));
        objectNode.set(TIMER, dataNode);
    }
    return objectNode;
}
Project: aliyun-oss-hadoop-fs    File: ResourceSchedulerWrapper.java
@Override
public void run() {
  samplerLock.lock();
  try {
    for (Histogram histogram : schedulerHistogramList) {
      Timer timer = histogramTimerMap.get(histogram);
      histogram.update((int) timer.getSnapshot().getMean());
    }
  } finally {
    samplerLock.unlock();
  }
}
Project: aliyun-oss-hadoop-fs    File: SLSCapacityScheduler.java
@Override
public void run() {
  samplerLock.lock();
  try {
    for (Histogram histogram : schedulerHistogramList) {
      Timer timer = histogramTimerMap.get(histogram);
      histogram.update((int) timer.getSnapshot().getMean());
    }
  } finally {
    samplerLock.unlock();
  }
}
Project: semantic-metrics    File: FastForwardReporter.java
private void reportHistogram(MetricId key, Histogram value) {
    key = MetricId.join(prefix, key);

    final Metric m = FastForward
        .metric(key.getKey())
        .attributes(key.getTags())
        .attribute(METRIC_TYPE, "histogram");

    reportHistogram(m, value.getSnapshot());
}
Project: metrics-circonus    File: CirconusReporterTest.java
@Test
public void reportsByteGaugeValues() throws Exception {
  Gauge gauge = gauge((byte) 1);

  reporter.report(map("gauge", gauge),
          this.<Counter>map(),
          this.<Histogram>map(),
          this.<Meter>map(),
          this.<Timer>map());

  gaugeTestHelper("gauge", (byte) 1, timestamp, HOST, tags);
}
Project: HikariCP    File: TestMetrics.java
@Test
public void testMetricUsage() throws SQLException
{
   MetricRegistry metricRegistry = new MetricRegistry();

   HikariConfig config = new HikariConfig();
   config.setMinimumIdle(1);
   config.setMaximumPoolSize(1);
   config.setMetricRegistry(metricRegistry);
   config.setInitializationFailFast(false);
   config.setPoolName("test");
   config.setDataSourceClassName("com.zaxxer.hikari.mocks.StubDataSource");

   HikariDataSource ds = new HikariDataSource(config);
   try {
      Connection connection = ds.getConnection();
      UtilityElf.quietlySleep(250L);
      connection.close();

      Histogram histo = metricRegistry.getHistograms(new MetricFilter() {
         /** {@inheritDoc} */
         @Override
         public boolean matches(String name, Metric metric)
         {
            return "test.pool.Usage".equals(MetricRegistry.name("test", "pool", "Usage"));
         }
      }).values().iterator().next();

      Assert.assertEquals(1, histo.getCount());
      double seventyFifth = histo.getSnapshot().get75thPercentile();
      Assert.assertTrue("Seventy-fith percentile less than 250ms: " + seventyFifth, seventyFifth >= 250.0);
   }
   finally {
      ds.close();
   }
}
Project: prometheus-client    File: Summary.java
@Override
ChildMetricRepo<Histogram> createChildMetricRepo() {
  if (getLabelNames().isEmpty()) {
    return new UnlabeledChildRepo<>(new MetricData<>(createHistogram()));
  } else {
    return new LabeledChildrenRepo<>(labelValues -> new MetricData<>(createHistogram(), labelValues));
  }
}
Project: codahale-aggregated-metrics-cloudwatch-reporter    File: CloudWatchReporterTest.java
@Test
public void shouldReportHistogramSubsequentSnapshotValues_SumMaxMinValues() throws Exception {
    CloudWatchReporter reporter = reporterBuilder.withStatisticSet().build();

    final Histogram slidingWindowHistogram = new Histogram(new SlidingWindowReservoir(4));
    metricRegistry.register("SlidingWindowHistogram", slidingWindowHistogram);

    slidingWindowHistogram.update(1);
    slidingWindowHistogram.update(2);
    slidingWindowHistogram.update(30);
    reporter.report();

    final MetricDatum metricData = metricDatumByDimensionFromCapturedRequest(DIMENSION_SNAPSHOT_SUMMARY);

    assertThat(metricData.getStatisticValues().getMaximum().intValue()).isEqualTo(30);
    assertThat(metricData.getStatisticValues().getMinimum().intValue()).isEqualTo(1);
    assertThat(metricData.getStatisticValues().getSampleCount().intValue()).isEqualTo(3);
    assertThat(metricData.getStatisticValues().getSum().intValue()).isEqualTo(33);
    assertThat(metricData.getUnit()).isEqualTo(None.toString());

    slidingWindowHistogram.update(4);
    slidingWindowHistogram.update(100);
    slidingWindowHistogram.update(5);
    slidingWindowHistogram.update(6);
    reporter.report();

    final MetricDatum secondMetricData = metricDatumByDimensionFromCapturedRequest(DIMENSION_SNAPSHOT_SUMMARY);

    assertThat(secondMetricData.getStatisticValues().getMaximum().intValue()).isEqualTo(100);
    assertThat(secondMetricData.getStatisticValues().getMinimum().intValue()).isEqualTo(4);
    assertThat(secondMetricData.getStatisticValues().getSampleCount().intValue()).isEqualTo(4);
    assertThat(secondMetricData.getStatisticValues().getSum().intValue()).isEqualTo(115);
    assertThat(secondMetricData.getUnit()).isEqualTo(None.toString());

}
Project: metrics-circonus    File: CirconusReporterTest.java
@Test
public void reportsDoubleGaugeValues() throws Exception {
  reporter.report(map("gauge", gauge(1.1)),
                  this.<Counter>map(),
                  this.<Histogram>map(),
                  this.<Meter>map(),
                  this.<Timer>map());

  gaugeTestHelper("gauge", 1.1, timestamp, HOST, tags);
}
Project: big-c    File: SLSWebApp.java
public SLSWebApp(ResourceSchedulerWrapper wrapper, int metricsAddressPort) {
  this.wrapper = wrapper;
  metrics = wrapper.getMetrics();
  handleOperTimecostHistogramMap =
          new HashMap<SchedulerEventType, Histogram>();
  queueAllocatedMemoryCounterMap = new HashMap<String, Counter>();
  queueAllocatedVCoresCounterMap = new HashMap<String, Counter>();
  schedulerMetrics = wrapper.getSchedulerMetrics();
  port = metricsAddressPort;
}
Project: big-c    File: ResourceSchedulerWrapper.java
@Override
public void run() {
  samplerLock.lock();
  try {
    for (Histogram histogram : schedulerHistogramList) {
      Timer timer = histogramTimerMap.get(histogram);
      histogram.update((int) timer.getSnapshot().getMean());
    }
  } finally {
    samplerLock.unlock();
  }
}
Project: riposte    File: SignalFxAwareCodahaleMetricsCollectorTest.java
private void verifyRollingWindowHistogramBuilder(MetricBuilder<Histogram> histogramBuilder,
                                             long expectedReportingInterval,
                                             TimeUnit expectedTimeUnit) {
    assertThat(histogramBuilder).isInstanceOf(RollingWindowHistogramBuilder.class);
    assertThat(Whitebox.getInternalState(histogramBuilder, "amount")).isEqualTo(expectedReportingInterval);
    assertThat(Whitebox.getInternalState(histogramBuilder, "timeUnit")).isEqualTo(expectedTimeUnit);
}
Project: riposte    File: SignalFxAwareCodahaleMetricsCollectorTest.java
@Test
public void getNamedHistogram_creates_histogram_using_sfx_mechanisms() {
    // given
    String histogramName = UUID.randomUUID().toString();

    // when
    Histogram result = sfxImpl.getNamedHistogram(histogramName);

    // then
    verifyMetricCreation(histogramBuilderMock, histogramTaggerMock, histogramName, histogramMock, result);
}
Project: riposte    File: SignalFxEndpointMetricsHandlerTest.java
@Test
public void RollingWindowHistogramBuilder_newMetric_creates_a_new_histogram_with_each_call() {
    // given
    RollingWindowHistogramBuilder rwhb = new RollingWindowHistogramBuilder(42, TimeUnit.DAYS);

    // when
    Histogram firstCallHistogram = rwhb.newMetric();
    Histogram secondCallHistogram = rwhb.newMetric();

    // then
    assertThat(firstCallHistogram).isNotSameAs(secondCallHistogram);
}