Example source code for the Java class com.codahale.metrics.Gauge
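The project snippets below all follow the same pattern: implement Gauge#getValue (directly, as a lambda, or as an anonymous class) and register the instance with a MetricRegistry under a dotted metric name. Here is a minimal, self-contained sketch of that pattern, assuming only the stock com.codahale.metrics API; the class name GaugeQuickStart and the metric name "jobs.queued" are illustrative and not taken from any project below.

import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;

public class GaugeQuickStart {
    public static void main(String[] args) throws InterruptedException {
        MetricRegistry registry = new MetricRegistry();
        Queue<String> jobs = new ConcurrentLinkedQueue<>();

        // Gauge has a single method, getValue(), so a lambda or method reference works on Java 8+.
        registry.register(MetricRegistry.name("jobs", "queued"), (Gauge<Integer>) jobs::size);

        // Any reporter polls registered gauges; ConsoleReporter prints to stdout.
        ConsoleReporter reporter = ConsoleReporter.forRegistry(registry)
                .convertRatesTo(TimeUnit.SECONDS)
                .convertDurationsTo(TimeUnit.MILLISECONDS)
                .build();
        reporter.start(1, TimeUnit.SECONDS);

        jobs.add("first-job");
        Thread.sleep(3000); // allow the reporter to print a few samples
        reporter.stop();
    }
}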

Project: snowflake    File: ZKInt32IdGenerator.java
public ZKInt32IdGenerator(CuratorFramework client, String lockPath, String sequencePath, String name, long start, int rangeCount) {

        Preconditions.checkNotNull(client);
        Preconditions.checkNotNull(lockPath);
        Preconditions.checkNotNull(sequencePath);
        Preconditions.checkNotNull(name);
        this.client = client;
        client.start();
        RangeStore rangeStore = new ZkRangeStore(name, client, lockPath, sequencePath, 1, TimeUnit.SECONDS, start, rangeCount);
        try {
            start = rangeStore.getNextRange();
        } catch (InterruptedException e) {
            throw new IllegalStateException("ZKInt32IdGenerator failed to obtain its initial ID range", e);
        }
        rangeSequence = new RangeSequence(1, start, rangeCount, rangeStore);
        rangeSequence.start();
        ReporterHolder.metrics.register("ZKInt32.seq." + name + ".currId", (Gauge<Long>) () -> rangeSequence.get());
        timer = ReporterHolder.metrics.timer(name("ZKInt32.seq." + name, "generateId"));
    }
Project: snowflake    File: SnowflakeServer.java
public void start() throws Exception {
    client.start();

    leaderSelector = new LeaderSelector(client, SnowflakeConfig.getLeaderPath(), this);
    leaderSelector.autoRequeue();
    LOGGER.info("start SnowflakeServer... ip: {}", getHostname());

    leaderSelector.start();
    while (!hasLeader()) {
        Thread.sleep(1000);
    }

    initWorkerId();

    ReporterHolder.metrics.register(MetricRegistry.name("SnowflakeServer", "workerId"), new Gauge<Integer>() {
        @Override
        public Integer getValue() {
            return workerId;
        }
    });
}
Project: metrics-mackerel    File: MackerelConfig.java
@PostConstruct
public void initialize() {
    for (Metric<?> metric : systemPublicMetrics.metrics()) {
        Gauge<Long> metricGauge = () -> metric.getValue().longValue();
        String name = metric.getName();
        if (!name.contains(".")) {
            name += ".total";
        }
        registry.register(name, metricGauge);
    }

    final MackerelSender sender = new MackerelSender(serviceName, apiKey);
    final MackerelReporter reporter = MackerelReporter
            .forRegistry(registry)
            .build(sender);
    reporter.start(1, TimeUnit.MINUTES);
}
Project: verify-hub    File: IdpAssertionMetricsCollectorTest.java
@Test
public void shouldCollectNotOnOrAfterValueFromAssertion() {
    DateTimeFreezer.freezeTime();
    MetricRegistry metricRegistry = new MetricRegistry();
    IdpAssertionMetricsCollector idpAssertionMetricsCollector = new IdpAssertionMetricsCollector(metricRegistry);
    DateTime notOnOrAfter = DateTime.now().plusMinutes(15);
    Assertion anAssertion = anAssertion()
            .withIssuer(anIssuer().withIssuerId("testIdP").build())
            .withSubject(aSubject().withSubjectConfirmation(aSubjectConfirmation()
                    .withSubjectConfirmationData(aSubjectConfirmationData()
                            .withNotOnOrAfter(notOnOrAfter)
                            .build())
                    .build())
                    .build())
            .buildUnencrypted();

    idpAssertionMetricsCollector.update(anAssertion);

    Gauge actual = metricRegistry.getGauges().get("notOnOrAfter.testIdP");
    assertThat(actual.getValue()).isEqualTo(15L);
}
Project: dropwizard-influxdb-reporter    File: InfluxDbMeasurementReporter.java
@Override
public void report(final SortedMap<String, Gauge> gauges,
                   final SortedMap<String, Counter> counters,
                   final SortedMap<String, Histogram> histograms,
                   final SortedMap<String, Meter> meters,
                   final SortedMap<String, Timer> timers) {
  final long timestamp = clock.instant().toEpochMilli();

  final ImmutableList<InfluxDbMeasurement> influxDbMeasurements = ImmutableList.<InfluxDbMeasurement>builder()
    .addAll(transformer.fromGauges(gauges, timestamp))
    .addAll(transformer.fromCounters(counters, timestamp))
    .addAll(transformer.fromHistograms(histograms, timestamp))
    .addAll(transformer.fromMeters(meters, timestamp))
    .addAll(transformer.fromTimers(timers, timestamp))
    .build();

  sender.send(influxDbMeasurements);
}
Project: dropwizard-influxdb-reporter    File: DropwizardTransformerTest.java
@Test
public void testFromGauges_Ungrouped() {
  final DropwizardMeasurementParser parser = mock(DropwizardMeasurementParser.class);
  final DropwizardTransformer transformer = transformerWithParser(parser, false);

  final Map<String, Gauge> gauges = ImmutableMap.of(
    "some.stuff.queued", () -> 12,
    "some.stuff.processed", () -> 15
  );

  when(parser.parse("some.stuff.queued")).thenReturn(
    DropwizardMeasurement.create("some.stuff.queued", MEASUREMENT_TAGS, Optional.empty())
  );

  when(parser.parse("some.stuff.processed")).thenReturn(
    DropwizardMeasurement.create("some.stuff.processed", MEASUREMENT_TAGS, Optional.empty())
  );

  final List<InfluxDbMeasurement> expected = ImmutableList.of(
    InfluxDbMeasurement.create("some.stuff.queued", ALL_TAGS, ImmutableMap.of("value", "12i"), 90210L),
    InfluxDbMeasurement.create("some.stuff.processed", ALL_TAGS, ImmutableMap.of("value", "15i"), 90210L)
  );

  final List<InfluxDbMeasurement> measurements = transformer.fromGauges(gauges, 90210L);
  assertEquals("should not group gauge measurements", expected, measurements);
}
Project: dropwizard-influxdb-reporter    File: DropwizardTransformerTest.java
@Test
public void testFromGauges_Grouped() {
  final DropwizardMeasurementParser parser = mock(DropwizardMeasurementParser.class);
  final DropwizardTransformer transformer = transformerWithParser(parser, true);

  final Map<String, Gauge> gauges = ImmutableMap.of(
    "some.stuff.queued", () -> 12,
    "some.stuff.processed", () -> 15
  );

  when(parser.parse("some.stuff")).thenReturn(
    DropwizardMeasurement.create("some.stuff", MEASUREMENT_TAGS, Optional.empty())
  );

  final List<InfluxDbMeasurement> expected = ImmutableList.of(
    InfluxDbMeasurement.create("some.stuff", ALL_TAGS, ImmutableMap.of("queued", "12i", "processed", "15i"), 90210L)
  );

  final List<InfluxDbMeasurement> measurements = transformer.fromGauges(gauges, 90210L);
  assertEquals("should group gauges by tags and prefix", expected, measurements);
}
Project: dropwizard-influxdb-reporter    File: DropwizardTransformerTest.java
@Test
public void testGroupValues_Inline() {
  final DropwizardMeasurementParser parser = mock(DropwizardMeasurementParser.class);
  final DropwizardTransformer transformer = transformerWithParser(parser, true);

  final Map<String, Gauge> gauges = ImmutableMap.of(
    "Measurement queued", () -> 12,
    "Measurement processed", () -> 15
  );

  when(parser.parse("Measurement queued")).thenReturn(
    DropwizardMeasurement.create("Measurement", MEASUREMENT_TAGS, Optional.of("queued"))
  );

  when(parser.parse("Measurement processed")).thenReturn(
    DropwizardMeasurement.create("Measurement", MEASUREMENT_TAGS, Optional.of("processed"))
  );

  final Map<GroupKey, Map<String, Object>> expected = ImmutableMap.of(
    GroupKey.create("Measurement", MEASUREMENT_TAGS), ImmutableMap.of("queued", 12, "processed", 15)
  );

  final Map<GroupKey, Map<String, Object>> groups = transformer.groupValues(gauges, "unused_default_key", Gauge::getValue);
  assertEquals("should group values with inlined keys", expected, groups);
}
Project: dropwizard-influxdb-reporter    File: DropwizardTransformerTest.java
@Test
public void testGroupValues_CountingGauges() {
  final DropwizardMeasurementParser parser = mock(DropwizardMeasurementParser.class);
  final DropwizardTransformer transformer = transformerWithParser(parser, true);

  final Map<String, Gauge> gauges = ImmutableMap.of(
    "some.stuff.queued.count", () -> 12,
    "some.stuff.processed.count", () -> 15
  );

  when(parser.parse("some.stuff")).thenReturn(
    DropwizardMeasurement.create("some.stuff", MEASUREMENT_TAGS, Optional.empty())
  );

  final Map<GroupKey, Map<String, Object>> expected = ImmutableMap.of(
    GroupKey.create("some.stuff", MEASUREMENT_TAGS), ImmutableMap.of("queued.count", 12, "processed.count", 15)
  );

  final Map<GroupKey, Map<String, Object>> groups = transformer.groupValues(gauges, "unused_default_key", Gauge::getValue);
  assertEquals("should ignore .count postfix when parsing groups", expected, groups);
}
Project: dropwizard-influxdb-reporter    File: DropwizardTransformerTest.java
@Test
public void testGroupValues_NoDotIndex() {
  final DropwizardMeasurementParser parser = mock(DropwizardMeasurementParser.class);
  final DropwizardTransformer transformer = transformerWithParser(parser, true);

  final Map<String, Gauge> gauges = ImmutableMap.of(
    "some_stuff_queued", () -> 12,
    "some_stuff_processed", () -> 15
  );

  when(parser.parse("some_stuff_queued")).thenReturn(
    DropwizardMeasurement.create("some.stuff.queued", MEASUREMENT_TAGS, Optional.empty())
  );

  when(parser.parse("some_stuff_processed")).thenReturn(
    DropwizardMeasurement.create("some.stuff.processed", MEASUREMENT_TAGS, Optional.empty())
  );

  final Map<GroupKey, Map<String, Object>> expected = ImmutableMap.of(
    GroupKey.create("some.stuff.queued", MEASUREMENT_TAGS), ImmutableMap.of("default_key", 12),
    GroupKey.create("some.stuff.processed", MEASUREMENT_TAGS), ImmutableMap.of("default_key", 15)
  );

  final Map<GroupKey, Map<String, Object>> groups = transformer.groupValues(gauges, "default_key", Gauge::getValue);
  assertEquals("should use fully parsed key and default value", expected, groups);
}
Project: dropwizard-influxdb-reporter    File: DropwizardTransformerTest.java
@Test
public void testGroupValues_WithDotIndex() {
  final DropwizardMeasurementParser parser = mock(DropwizardMeasurementParser.class);
  final DropwizardTransformer transformer = transformerWithParser(parser, true);

  final Map<String, Gauge> gauges = ImmutableMap.of(
    "some.stuff.queued", () -> 12,
    "some.stuff.processed", () -> 15
  );

  when(parser.parse("some.stuff")).thenReturn(
    DropwizardMeasurement.create("some.stuff", MEASUREMENT_TAGS, Optional.empty())
  );

  final Map<GroupKey, Map<String, Object>> expected = ImmutableMap.of(
    GroupKey.create("some.stuff", MEASUREMENT_TAGS), ImmutableMap.of("queued", 12, "processed", 15)
  );

  final Map<GroupKey, Map<String, Object>> groups = transformer.groupValues(gauges, "unused_default_key", Gauge::getValue);
  assertEquals("should group values by field postfix", expected, groups);
}
Project: QDrill    File: WorkManager.java
public void start(final DrillbitEndpoint endpoint, final Controller controller,
    final DataConnectionCreator data, final ClusterCoordinator coord, final PStoreProvider provider) {
  dContext = new DrillbitContext(endpoint, bContext, coord, controller, data, workBus, provider, executor);
  statusThread.start();

  // TODO: remove this try block once metrics are moved off the singleton; for now, catch to avoid unit-test failures
  try {
    dContext.getMetrics().register(
        MetricRegistry.name("drill.exec.work.running_fragments." + dContext.getEndpoint().getUserPort()),
            new Gauge<Integer>() {
              @Override
              public Integer getValue() {
                return runningFragments.size();
              }
        });
  } catch (final IllegalArgumentException e) {
    logger.warn("Exception while registering metrics", e);
  }
}
Project: hadoop    File: ResourceSchedulerWrapper.java
private void registerContainerAppNumMetrics() {
  metrics.register("variable.running.application",
    new Gauge<Integer>() {
      @Override
      public Integer getValue() {
        if (scheduler == null || scheduler.getRootQueueMetrics() == null) {
          return 0;
        } else {
          return scheduler.getRootQueueMetrics().getAppsRunning();
        }
      }
    }
  );
  metrics.register("variable.running.container",
    new Gauge<Integer>() {
      @Override
      public Integer getValue() {
        if (scheduler == null || scheduler.getRootQueueMetrics() == null) {
          return 0;
        } else {
          return scheduler.getRootQueueMetrics().getAllocatedContainers();
        }
      }
    }
  );
}
Project: hadoop    File: FifoSchedulerMetrics.java
@Override
public void trackQueue(String queueName) {
  trackedQueues.add(queueName);
  FifoScheduler fifo = (FifoScheduler) scheduler;
  // for FifoScheduler, only DEFAULT_QUEUE
  // here the three parameters don't affect the results
  final QueueInfo queue = fifo.getQueueInfo(queueName, false, false);
  // track currentCapacity, maximumCapacity (always 1.0f)
  metrics.register("variable.queue." + queueName + ".currentcapacity",
    new Gauge<Float>() {
      @Override
      public Float getValue() {
        return queue.getCurrentCapacity();
      }
    }
  );
  metrics.register("variable.queue." + queueName + ".",
    new Gauge<Float>() {
      @Override
      public Float getValue() {
        return queue.getCurrentCapacity();
      }
    }
  );
}
Project: dropwizard-prometheus    File: DropwizardMetricsExporter.java
public void writeGauge(String name, Gauge<?> gauge) throws IOException {
    final String sanitizedName = sanitizeMetricName(name);
    writer.writeHelp(sanitizedName, getHelpMessage(name, gauge));
    writer.writeType(sanitizedName, MetricType.GAUGE);

    Object obj = gauge.getValue();
    double value;
    if (obj instanceof Number) {
        value = ((Number) obj).doubleValue();
    } else if (obj instanceof Boolean) {
        value = ((Boolean) obj) ? 1 : 0;
    } else {
        LOG.warn("Invalid type for Gauge {}: {}", name, obj.getClass().getName());
        return;
    }

    writer.writeSample(sanitizedName, emptyMap(), value);
}
Project: athena    File: EventMetric.java
/**
 * Registers the metrics.
 */
public void registerMetrics() {
    metricsComponent = metricsService.registerComponent(componentName);
    metricsFeature = metricsComponent.registerFeature(featureName);

    lastEventTimestampEpochMs = 0;
    lastEventTimestampGauge =
        metricsService.registerMetric(metricsComponent,
                                      metricsFeature,
                                      GAUGE_TIMESTAMP_NAME,
                                      new Gauge<Long>() {
                                          @Override
                                          public Long getValue() {
                                              return lastEventTimestampEpochMs;
                                          }
                                      });

    eventRateMeter = metricsService.createMeter(metricsComponent,
                                                metricsFeature,
                                                METER_RATE_NAME);
}
Project: athena    File: IntentEventsMetricsCommand.java
/**
 * Prints an Event Metric.
 *
 * @param operationStr the string with the intent operation to print
 * @param eventMetric the Event Metric to print
 */
private void printEventMetric(String operationStr,
                              EventMetric eventMetric) {
    Gauge<Long> gauge = eventMetric.lastEventTimestampGauge();
    Meter meter = eventMetric.eventRateMeter();
    TimeUnit rateUnit = TimeUnit.SECONDS;
    double rateFactor = rateUnit.toSeconds(1);

    // Print the Gauge
    print(FORMAT_GAUGE, operationStr, gauge.getValue());

    // Print the Meter
    print(FORMAT_METER, operationStr, meter.getCount(),
          meter.getMeanRate() * rateFactor,
          meter.getOneMinuteRate() * rateFactor,
          meter.getFiveMinuteRate() * rateFactor,
          meter.getFifteenMinuteRate() * rateFactor);
}
Project: athena    File: TopologyEventsMetricsCommand.java
/**
 * Prints an Event Metric.
 *
 * @param operationStr the string with the intent operation to print
 * @param eventMetric the Event Metric to print
 */
private void printEventMetric(String operationStr,
                              EventMetric eventMetric) {
    Gauge<Long> gauge = eventMetric.lastEventTimestampGauge();
    Meter meter = eventMetric.eventRateMeter();
    TimeUnit rateUnit = TimeUnit.SECONDS;
    double rateFactor = rateUnit.toSeconds(1);

    // Print the Gauge
    print(FORMAT_GAUGE, operationStr, gauge.getValue());

    // Print the Meter
    print(FORMAT_METER, operationStr, meter.getCount(),
          meter.getMeanRate() * rateFactor,
          meter.getOneMinuteRate() * rateFactor,
          meter.getFiveMinuteRate() * rateFactor,
          meter.getFifteenMinuteRate() * rateFactor);
}
Project: dremio-oss    File: CoordExecService.java
@Override
public void start() throws Exception {
  fabricService.get().registerProtocol(new CoordExecProtocol());

  final String prefix = "rpc";
  Metrics.registerGauge(prefix + "bit.control.current", new Gauge<Long>() {
    @Override
    public Long getValue() {
      return allocator.getAllocatedMemory();
    }
  });

  Metrics.registerGauge(prefix + "bit.control.peak", new Gauge<Long>() {
    @Override
    public Long getValue() {
      return allocator.getPeakMemoryAllocation();
    }
  });
}
Project: dremio-oss    File: FragmentExecutors.java
public FragmentExecutors(
  final ExecToCoordTunnelCreator tunnelCreator,
  final ExitCallback callback,
  final TaskPool pool,
  final OptionManager options) {
  this.callback = callback;
  this.pool = pool;
  this.evictionDelayMillis = TimeUnit.SECONDS.toMillis(
    options.getOption(ExecConstants.FRAGMENT_CACHE_EVICTION_DELAY_S));

  Metrics.registerGauge(MetricRegistry.name("dremio.exec.work.running_fragments"), new Gauge<Integer>() {
    @Override
    public Integer getValue() {
      return size();
    }
  });

  initEvictionThread(evictionDelayMillis);
}
Project: hawkular-dropwizard-reporter    File: HawkularReporterTest.java
@Test
public void shouldReportDoubleGauge() {
    HawkularReporter reporter = HawkularReporter.builder(registry, "unit-test")
            .useHttpClient(uri -> client)
            .build();

    final Gauge<Double> gauge = () -> 1.5d;
    registry.register("gauge.double", gauge);
    reporter.report();

    assertThat(client.getMetricsRestCalls()).hasSize(1);
    JSONObject metrics = new JSONObject(client.getMetricsRestCalls().get(0));
    assertThat(metrics.keySet()).containsOnly("gauges");

    JSONArray gaugesJson = metrics.getJSONArray("gauges");
    Map<String, Double> values = StreamSupport.stream(gaugesJson.spliterator(), false)
            .collect(toMap(idFromRoot::extract, dValueFromRoot::extract));
    assertThat(values).containsOnly(entry("gauge.double", 1.5d));
}
Project: aliyun-oss-hadoop-fs    File: FifoSchedulerMetrics.java
@Override
public void trackQueue(String queueName) {
  trackedQueues.add(queueName);
  FifoScheduler fifo = (FifoScheduler) scheduler;
  // for FifoScheduler, only DEFAULT_QUEUE
  // here the three parameters don't affect the results
  final QueueInfo queue = fifo.getQueueInfo(queueName, false, false);
  // track currentCapacity, maximumCapacity (always 1.0f)
  metrics.register("variable.queue." + queueName + ".currentcapacity",
    new Gauge<Float>() {
      @Override
      public Float getValue() {
        return queue.getCurrentCapacity();
      }
    }
  );
  metrics.register("variable.queue." + queueName + ".",
    new Gauge<Float>() {
      @Override
      public Float getValue() {
        return queue.getCurrentCapacity();
      }
    }
  );
}
Project: hawkular-dropwizard-reporter    File: HawkularReporterTest.java
@Test
public void shouldReportBigIntegerGauge() {
    HawkularReporter reporter = HawkularReporter.builder(registry, "unit-test")
            .useHttpClient(uri -> client)
            .build();

    final Gauge<BigInteger> gauge = () -> new BigInteger("2");
    registry.register("gauge.bigi", gauge);
    reporter.report();

    assertThat(client.getMetricsRestCalls()).hasSize(1);
    JSONObject metrics = new JSONObject(client.getMetricsRestCalls().get(0));
    assertThat(metrics.keySet()).containsOnly("gauges");

    JSONArray gaugesJson = metrics.getJSONArray("gauges");
    Map<String, Double> values = StreamSupport.stream(gaugesJson.spliterator(), false)
            .collect(toMap(idFromRoot::extract, dValueFromRoot::extract));
    assertThat(values).containsOnly(entry("gauge.bigi", 2d));
}
Project: metrics-circonus    File: CirconusReporterTest.java
@Test
public void reportsWithCallback() throws Exception {
  List<String> dynamicTags = new ArrayList<String>();
  dynamicTags.add("status:active");
  dynamicTags.add("speed:29");

  when(callback.getTags()).thenReturn(dynamicTags);

  final Counter counter = mock(Counter.class);
  when(counter.getCount()).thenReturn(100L);

  reporterWithCallback.report(this.<Gauge>map(),
          this.<Counter>map("counter", counter),
          this.<Histogram>map(),
          this.<Meter>map(),
          this.<Timer>map());

  verify(request).addGauge(new CirconusGauge("counter", 100L, timestamp, HOST, dynamicTags));
}
Project: quarks    File: MetricsEverywhereTest.java
@Override
public final void initialize(OpletContext<T, T> context) {
    super.initialize(context);

    this.meter = new Meter();
    this.gauge = new Gauge<Long>() {
        @Override
        public Long getValue() {
            return System.currentTimeMillis();
        }
    };

    MetricRegistry registry = context.getService(MetricRegistry.class);
    if (registry != null) {
        registry.register(context.uniquify("testMeter"), meter);
        registry.register(context.uniquify("testGauge"), gauge);
    }
}
Project: chaperone    File: AuditReporter.java
AuditReporter(int queueSize, long timeBucketIntervalInSec, int reportFreqMsgCount, int reportFreqIntervalSec,
    boolean combineMetricsAmongHosts) {
  reportExecutor =
      Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(getType() + "-audit-reporter-%d")
          .build());

  queueSize = Util.ceilingNextPowerOfTwo(queueSize);
  disruptor = new Disruptor<AuditMsgReportTask>(new AuditMsgReportTaskFactory(), queueSize, reportExecutor);
  disruptor.handleEventsWith(new AuditMsgReportTaskHandler(this));
  ringBuffer = disruptor.getRingBuffer();

  aggregator =
      new AuditAggregator(timeBucketIntervalInSec, reportFreqMsgCount, reportFreqIntervalSec,
          combineMetricsAmongHosts);

  SUBMITTED_COUNTER = Metrics.getRegistry().meter(getType() + ".auditReporter.submittedNumber");
  FAILED_TO_SUBMIT_COUNTER = Metrics.getRegistry().meter(getType() + ".auditReporter.failedToSubmitNumber");
  REPORTED_COUNTER = Metrics.getRegistry().meter(getType() + ".auditReporter.reportedNumber");
  FAILED_TO_REPORT_COUNTER = Metrics.getRegistry().meter(getType() + ".auditReporter.failedToReportNumber");
  Metrics.getRegistry().register(getType() + ".auditReporter.queueSize", new Gauge<Integer>() {
    @Override
    public Integer getValue() {
      return (int) disruptor.getRingBuffer().remainingCapacity();
    }
  });
}
Project: semantic-metrics    File: SemanticMetricRegistry.java
private void notifyListenerOfRemovedMetric(
    final MetricId name, final Metric metric, final SemanticMetricRegistryListener listener
) {
    if (metric instanceof Gauge) {
        listener.onGaugeRemoved(name);
    } else if (metric instanceof Counter) {
        listener.onCounterRemoved(name);
    } else if (metric instanceof Histogram) {
        listener.onHistogramRemoved(name);
    } else if (metric instanceof Meter) {
        listener.onMeterRemoved(name);
    } else if (metric instanceof Timer) {
        listener.onTimerRemoved(name);
    } else if (metric instanceof DerivingMeter) {
        listener.onDerivingMeterRemoved(name);
    } else {
        throw new IllegalArgumentException("Unknown metric type: " + metric.getClass());
    }
}
Project: gemfire-copy    File: ToolBox.java
public static void addTimerForPdxTypeMetrics(final ClientCache clientCache) {

        Region temp = clientCache.getRegion("PdxTypes");
        if (temp == null) {
            temp = clientCache.createClientRegionFactory(ClientRegionShortcut.PROXY).create("PdxTypes");
        }
        final Region pdxRegions = temp;

        metricRegistry.register(MetricRegistry.name("PdxTypes", "count"),
                (Gauge<Integer>) () -> pdxRegions.keySetOnServer().size());

        ConsoleReporter reporter = ConsoleReporter.forRegistry(metricRegistry)
                .convertRatesTo(TimeUnit.SECONDS)
                .convertDurationsTo(TimeUnit.MILLISECONDS)
                .build();
        reporter.start(1, TimeUnit.MINUTES);
    }
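The ToolBox gauge above calls pdxRegions.keySetOnServer() on every poll, which is a server round trip. When a gauge value is expensive to compute, the same library offers CachedGauge, which recomputes it at most once per timeout. A hedged sketch of that variant, assuming the stock com.codahale.metrics API; the class name, metric name, and expensiveCount() helper are illustrative placeholders, not code from gemfire-copy.

import com.codahale.metrics.CachedGauge;
import com.codahale.metrics.MetricRegistry;

import java.util.concurrent.TimeUnit;

public class CachedGaugeSketch {

    private final MetricRegistry metricRegistry = new MetricRegistry();

    public void registerCachedCount() {
        // Recompute at most once every 30 seconds, no matter how often reporters poll the gauge.
        metricRegistry.register(MetricRegistry.name("PdxTypes", "cachedCount"),
                new CachedGauge<Integer>(30, TimeUnit.SECONDS) {
                    @Override
                    protected Integer loadValue() {
                        return expensiveCount(); // e.g. pdxRegions.keySetOnServer().size()
                    }
                });
    }

    private int expensiveCount() {
        // Placeholder for a costly lookup such as a server-side key scan.
        return 42;
    }
}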
Project: HttpSessionReplacer    File: TestJedisPoolFacade.java
@Test
public void testMetrics() {
  MetricRegistry metrics = mock(MetricRegistry.class);
  Client client = mock(Client.class);
  when(client.getHost()).thenReturn("myhost");
  when(jedis.getClient()).thenReturn(client);
  when(pool.getNumActive()).thenReturn(1);
  when(pool.getNumIdle()).thenReturn(2);
  when(pool.getNumWaiters()).thenReturn(3);
  rf.startMonitoring(metrics);
  @SuppressWarnings("rawtypes")
  ArgumentCaptor<Gauge> gauge = ArgumentCaptor.forClass(Gauge.class);
  verify(metrics).register(eq("com.amadeus.session.redis.myhost.active"), gauge.capture());
  verify(metrics).register(eq("com.amadeus.session.redis.myhost.idle"), gauge.capture());
  verify(metrics).register(eq("com.amadeus.session.redis.myhost.waiting"), gauge.capture());
  assertEquals(1, gauge.getAllValues().get(0).getValue());
  assertEquals(2, gauge.getAllValues().get(1).getValue());
  assertEquals(3, gauge.getAllValues().get(2).getValue());
}
Project: beam    File: WithMetricsSupport.java
private Maps.EntryTransformer<String, Object, Gauge> toGauge() {
  return new Maps.EntryTransformer<String, Object, Gauge>() {

    @Override
    public Gauge transformEntry(final String name, final Object rawValue) {
      return new Gauge<Double>() {

        @Override
        public Double getValue() {
          // at the moment the metric's type is assumed to be
          // compatible with Double. While far from perfect, it seems reasonable at
          // this point in time
          try {
            return Double.parseDouble(rawValue.toString());
          } catch (final Exception e) {
            LOG.warn("Failed reporting metric with name [{}], of type [{}], since it could not be"
                + " converted to double", name, rawValue.getClass().getSimpleName(), e);
            return null;
          }
        }
      };
    }
  };
}
Project: incubator-gossip    File: DataEventManager.java
public DataEventManager(MetricRegistry metrics) {
  perNodeDataHandlers = new CopyOnWriteArrayList<>();
  perNodeDataHandlerQueue = new ArrayBlockingQueue<>(DataEventConstants.PER_NODE_DATA_QUEUE_SIZE);
  perNodeDataEventExecutor = new ThreadPoolExecutor(
          DataEventConstants.PER_NODE_DATA_CORE_POOL_SIZE,
          DataEventConstants.PER_NODE_DATA_MAX_POOL_SIZE,
          DataEventConstants.PER_NODE_DATA_KEEP_ALIVE_TIME_SECONDS, TimeUnit.SECONDS,
          perNodeDataHandlerQueue, new ThreadPoolExecutor.DiscardOldestPolicy());

  sharedDataHandlers = new CopyOnWriteArrayList<>();
  sharedDataHandlerQueue = new ArrayBlockingQueue<>(DataEventConstants.SHARED_DATA_QUEUE_SIZE);
  sharedDataEventExecutor = new ThreadPoolExecutor(DataEventConstants.SHARED_DATA_CORE_POOL_SIZE,
          DataEventConstants.SHARED_DATA_MAX_POOL_SIZE,
          DataEventConstants.SHARED_DATA_KEEP_ALIVE_TIME_SECONDS, TimeUnit.SECONDS,
          sharedDataHandlerQueue, new ThreadPoolExecutor.DiscardOldestPolicy());

  metrics.register(DataEventConstants.PER_NODE_DATA_SUBSCRIBERS_SIZE,
          (Gauge<Integer>) () -> perNodeDataHandlers.size());
  metrics.register(DataEventConstants.PER_NODE_DATA_SUBSCRIBERS_QUEUE_SIZE,
          (Gauge<Integer>) () -> perNodeDataHandlerQueue.size());
  metrics.register(DataEventConstants.SHARED_DATA_SUBSCRIBERS_SIZE,
          (Gauge<Integer>) () -> sharedDataHandlers.size());
  metrics.register(DataEventConstants.SHARED_DATA_SUBSCRIBERS_QUEUE_SIZE,
          (Gauge<Integer>) () -> sharedDataHandlerQueue.size());

}
Project: drill    File: WorkManager.java
public void start(
    final DrillbitEndpoint endpoint,
    final Controller controller,
    final DataConnectionCreator data,
    final ClusterCoordinator coord,
    final PersistentStoreProvider provider,
    final PersistentStoreProvider profilesProvider) {
  dContext = new DrillbitContext(endpoint, bContext, coord, controller, data, workBus, provider, profilesProvider);
  statusThread.start();

  DrillMetrics.register("drill.fragments.running",
      new Gauge<Integer>() {
        @Override
        public Integer getValue() {
          return runningFragments.size();
        }
      });
}
Project: riposte    File: SignalFxAwareCodahaleMetricsCollectorTest.java
private <M extends Metric, V> void verifyMetricRegistration(
    Tagger<Gauge<V>> taggerMock, String gaugeName, List<Pair<String, String>> dimensions,
    Gauge<V> expectedGauge, Gauge<V> actualGauge
) {
    int numDimensions = (dimensions == null) ? 0 : dimensions.size();

    ArgumentCaptor<Gauge> gaugeArgumentCaptor = ArgumentCaptor.forClass(Gauge.class);
    verify(metricMetadataMock).forMetric(gaugeArgumentCaptor.capture());
    verify(taggerMock).withMetricName(gaugeName);
    if (numDimensions == 0) {
        verify(taggerMock, never()).withDimension(anyString(), anyString());
    }
    else {
        for (Pair<String, String> dimension : dimensions) {
            verify(taggerMock).withDimension(dimension.getKey(), dimension.getValue());
        }
    }
    verify(taggerMock).register(metricRegistryMock);
    verifyNoMoreInteractions(metricMetadataMock, taggerMock);

    Gauge gaugeRegistered = gaugeArgumentCaptor.getValue();
    assertThat(gaugeRegistered).isNotNull();
    assertThat(gaugeRegistered).isSameAs(actualGauge);
    assertThat(actualGauge).isSameAs(expectedGauge);
}
Project: riposte    File: SignalFxEndpointMetricsHandlerTest.java
@DataProvider(value = {
    "true   |   true",
    "false  |   false"
}, splitBy = "\\|")
@Test
public void RollingWindowTimerBuilder_isInstance_works_as_expected(boolean useTimer, boolean expectedResult) {
    // given
    Metric metric = (useTimer) ? mock(Timer.class) : mock(Gauge.class);
    RollingWindowTimerBuilder rwtb = new RollingWindowTimerBuilder(42, TimeUnit.DAYS);

    // when
    boolean result = rwtb.isInstance(metric);

    // then
    assertThat(result).isEqualTo(expectedResult);
}
Project: chaperone    File: KafkaMonitor.java
private void injectMetrics(final TopicAndPartition topicAndPartition) {
  if (!partitionInjected.contains(topicAndPartition)) {
    Metrics.getRegistry().register(
        String.format(OFFSET_LAG_NAME_FORMAT, topicAndPartition.topic(), topicAndPartition.partition()),
        new Gauge<Long>() {
          @Override
          public Long getValue() {
            if (partitionLag.containsKey(topicAndPartition)) {
              return partitionLag.get(topicAndPartition);
            } else {
              return -1L;
            }
          }
        });
    partitionInjected.add(topicAndPartition);
  }
}
Project: mongoose-base    File: CustomMetricRegistry.java
private void notifyListenerOfRemovedMetric(
    final String name, final Metric metric, final MetricRegistryListener listener
) {
    if(metric instanceof Gauge) {
        listener.onGaugeRemoved(name);
    } else if(metric instanceof Counter) {
        listener.onCounterRemoved(name);
    } else if(metric instanceof Histogram) {
        listener.onHistogramRemoved(name);
    } else if(metric instanceof Meter) {
        listener.onMeterRemoved(name);
    } else if(metric instanceof Timer) {
        listener.onTimerRemoved(name);
    } else {
        throw new IllegalArgumentException("Unsupported metric type: " + metric.getClass());
    }
}
Project: metrics-jvm-extras    File: NativeMemoryUsageGaugeSetUnit0Test.java
@SuppressWarnings("rawtypes")
@Test
public void testGetMetrics() throws Exception {
    when(smaps.get(KEY.VSS)).thenReturn(1L);
    when(smaps.get(KEY.RSS)).thenReturn(2L);
    when(smaps.get(KEY.PSS)).thenReturn(3L);
    when(smaps.get(KEY.SWAP)).thenReturn(4L);
    when(smaps.get(KEY.SWAPPSS)).thenReturn(5L);

    final NativeMemoryUsageGaugeSet uut = new NativeMemoryUsageGaugeSet(smaps);

    final Map<String, Metric> metrics = uut.getMetrics();

    assertNotNull(metrics);
    assertEquals(5, metrics.keySet().size());
    assertTrue(metrics.keySet()
            .containsAll(Arrays.asList("vss", "rss", "pss", "swap", "swappss")));

    assertEquals(1L, ((Gauge) metrics.get("vss")).getValue());
    assertEquals(2L, ((Gauge) metrics.get("rss")).getValue());
    assertEquals(3L, ((Gauge) metrics.get("pss")).getValue());
    assertEquals(4L, ((Gauge) metrics.get("swap")).getValue());
    assertEquals(5L, ((Gauge) metrics.get("swappss")).getValue());
}
Project: https-github.com-g0t4-jenkins2-course-spring-boot    File: MetricRepositoryAutoConfigurationTests.java
@Test
public void dropwizardInstalledIfPresent() {
    this.context = new AnnotationConfigApplicationContext(
            MetricsDropwizardAutoConfiguration.class,
            MetricRepositoryAutoConfiguration.class, AopAutoConfiguration.class);
    GaugeService gaugeService = this.context.getBean(GaugeService.class);
    assertThat(gaugeService).isNotNull();
    gaugeService.submit("foo", 2.7);
    DropwizardMetricServices exporter = this.context
            .getBean(DropwizardMetricServices.class);
    assertThat(exporter).isEqualTo(gaugeService);
    MetricRegistry registry = this.context.getBean(MetricRegistry.class);
    @SuppressWarnings("unchecked")
    Gauge<Double> gauge = (Gauge<Double>) registry.getMetrics().get("gauge.foo");
    assertThat(gauge.getValue()).isEqualTo(new Double(2.7));
}
Project: sstable-adaptor    File: BufferPoolMetrics.java
public BufferPoolMetrics()
{
    misses = Metrics.meter(factory.createMetricName("Misses"));

    size = Metrics.register(factory.createMetricName("Size"), new Gauge<Long>()
    {
        public Long getValue()
        {
            return BufferPool.sizeInBytes();
        }
    });
}
Project: sstable-adaptor    File: ThreadPoolMetrics.java
/**
 * Create metrics for given ThreadPoolExecutor.
 *
 * @param executor Thread pool
 * @param path Type of thread pool
 * @param poolName Name of thread pool to identify metrics
 */
public ThreadPoolMetrics(final ThreadPoolExecutor executor, String path, String poolName)
{
    this.factory = new ThreadPoolMetricNameFactory("ThreadPools", path, poolName);

    activeTasks = Metrics.register(factory.createMetricName("ActiveTasks"), new Gauge<Integer>()
    {
        public Integer getValue()
        {
            return executor.getActiveCount();
        }
    });
    totalBlocked = Metrics.counter(factory.createMetricName("TotalBlockedTasks"));
    currentBlocked = Metrics.counter(factory.createMetricName("CurrentlyBlockedTasks"));
    completedTasks = Metrics.register(factory.createMetricName("CompletedTasks"), new Gauge<Long>()
    {
        public Long getValue()
        {
            return executor.getCompletedTaskCount();
        }
    });
    pendingTasks = Metrics.register(factory.createMetricName("PendingTasks"), new Gauge<Long>()
    {
        public Long getValue()
        {
            return executor.getTaskCount() - executor.getCompletedTaskCount();
        }
    });
    maxPoolSize = Metrics.register(factory.createMetricName("MaxPoolSize"), new Gauge<Integer>()
    {
        public Integer getValue()
        {
            return executor.getMaximumPoolSize();
        }
    });
}
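The ThreadPoolMetrics constructor above exposes absolute counts for a ThreadPoolExecutor. A related Gauge specialization in the same library is RatioGauge, which reports a numerator/denominator pair as a single double and returns NaN instead of dividing by zero. A minimal sketch under those assumptions; the class name PoolUtilizationGauge and the "Utilization" metric name are illustrative, not from sstable-adaptor.

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.RatioGauge;

import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;

public class PoolUtilizationGauge {

    public static void register(MetricRegistry metrics, ThreadPoolExecutor executor, String poolName) {
        // Utilization = active threads / maximum pool size, sampled each time a reporter polls the gauge.
        metrics.register(MetricRegistry.name("ThreadPools", poolName, "Utilization"),
                new RatioGauge() {
                    @Override
                    protected Ratio getRatio() {
                        return Ratio.of(executor.getActiveCount(), executor.getMaximumPoolSize());
                    }
                });
    }

    public static void main(String[] args) {
        MetricRegistry metrics = new MetricRegistry();
        ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(4);
        register(metrics, pool, "example-pool");
        pool.shutdown();
    }
}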