Example source code for the Java class com.amazonaws.services.cloudwatch.model.Datapoint
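The snippets below are collected from several open-source projects and all follow the same basic pattern: build a GetMetricStatisticsRequest, call getMetricStatistics on a CloudWatch client, and read the requested statistics off the returned Datapoint objects. As a minimal, self-contained sketch of that pattern with the AWS SDK for Java v1 (the region, instance ID, and metric choices below are illustrative placeholders, not taken from any of the projects listed here):

import java.util.Date;
import java.util.List;

import com.amazonaws.regions.Regions;
import com.amazonaws.services.cloudwatch.AmazonCloudWatch;
import com.amazonaws.services.cloudwatch.AmazonCloudWatchClientBuilder;
import com.amazonaws.services.cloudwatch.model.Datapoint;
import com.amazonaws.services.cloudwatch.model.Dimension;
import com.amazonaws.services.cloudwatch.model.GetMetricStatisticsRequest;
import com.amazonaws.services.cloudwatch.model.GetMetricStatisticsResult;

public class DatapointExample {
    public static void main(String[] args) {
        // Placeholder region; substitute the region your metrics live in.
        AmazonCloudWatch cloudWatch = AmazonCloudWatchClientBuilder.standard()
                .withRegion(Regions.US_EAST_1)
                .build();

        // Last hour of CPU utilization for a (placeholder) EC2 instance, in 5-minute buckets.
        long oneHourMillis = 60 * 60 * 1000L;
        GetMetricStatisticsRequest request = new GetMetricStatisticsRequest()
                .withNamespace("AWS/EC2")
                .withMetricName("CPUUtilization")
                .withDimensions(new Dimension().withName("InstanceId").withValue("i-0123456789abcdef0"))
                .withStatistics("Average")
                .withPeriod(300)
                .withStartTime(new Date(System.currentTimeMillis() - oneHourMillis))
                .withEndTime(new Date());

        GetMetricStatisticsResult result = cloudWatch.getMetricStatistics(request);
        List<Datapoint> datapoints = result.getDatapoints();
        for (Datapoint dp : datapoints) {
            // A Datapoint carries the timestamp plus the statistics that were requested;
            // getters for statistics that were not requested return null.
            System.out.println(dp.getTimestamp() + " average=" + dp.getAverage()
                    + " unit=" + dp.getUnit());
        }
    }
}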

Project: aws-client    File: Controller.java
private double getInstanceAverageLoad(AmazonCloudWatchClient cloudWatchClient, String instanceId) {

        long offsetInMilliseconds = 1000 * 60 * 60;
        GetMetricStatisticsRequest request = new GetMetricStatisticsRequest()
                .withStartTime(new Date(new Date().getTime() - offsetInMilliseconds))
                .withNamespace("AWS/EC2")
                .withPeriod(60 * 60)
                .withDimensions(new Dimension().withName("InstanceId").withValue(instanceId))
                .withMetricName("CPUUtilization")
                .withStatistics("Average", "Maximum")
                .withEndTime(new Date());
        GetMetricStatisticsResult getMetricStatisticsResult = cloudWatchClient.getMetricStatistics(request);

        double avgCPUUtilization = 0;
        List<Datapoint> dataPoints = getMetricStatisticsResult.getDatapoints();
        for (Datapoint dp : dataPoints) {
            avgCPUUtilization = dp.getAverage();
        }

        return avgCPUUtilization;
    }
Project: photon-model    File: TestAWSUtils.java
@SuppressWarnings("deprecation")
@Test
public void testAverageBurnRateCalculation() {
    List<Datapoint> dpList = new ArrayList<>();
    dpList.add(getDatapoint(1.0, new Date(2016, 10, 9, 5, 00, 00)));
    dpList.add(getDatapoint(2.0, new Date(2016, 10, 9, 9, 00, 00)));
    dpList.add(getDatapoint(3.0, new Date(2016, 10, 9, 13, 00, 00)));
    dpList.add(getDatapoint(4.0, new Date(2016, 10, 9, 17, 00, 00)));
    dpList.add(getDatapoint(5.0, new Date(2016, 10, 9, 21, 00, 00)));
    dpList.add(getDatapoint(6.0, new Date(2016, 10, 10, 1, 00, 00)));
    dpList.add(getDatapoint(7.0, new Date(2016, 10, 10, 5, 00, 00)));
    dpList.add(getDatapoint(8.0, new Date(2016, 10, 10, 9, 00, 00)));
    dpList.add(getDatapoint(9.0, new Date(2016, 10, 10, 13, 00, 00)));
    dpList.add(getDatapoint(10.0, new Date(2016, 10, 10, 17, 00, 00)));
    Double burnRate = AWSUtils.calculateAverageBurnRate(dpList);
    assertTrue("Received a null response", burnRate != null);
    assertTrue("BurnRate value is negative", burnRate > 0.0);
}
Project: photon-model    File: TestAWSUtils.java
@SuppressWarnings("deprecation")
@Test
public void testAverageBurnRateCalculationWithResetToZero() {
    List<Datapoint> dpList = new ArrayList<>();
    dpList.add(getDatapoint(4.0, new Date(2016, 10, 9, 5, 00, 00)));
    dpList.add(getDatapoint(5.0, new Date(2016, 10, 9, 9, 00, 00)));
    dpList.add(getDatapoint(6.0, new Date(2016, 10, 9, 13, 00, 00)));
    dpList.add(getDatapoint(7.0, new Date(2016, 10, 9, 17, 00, 00)));
    dpList.add(getDatapoint(8.0, new Date(2016, 10, 9, 21, 00, 00)));
    dpList.add(getDatapoint(9.0, new Date(2016, 10, 10, 1, 00, 00)));
    dpList.add(getDatapoint(0.0, new Date(2016, 10, 10, 5, 00, 00)));
    dpList.add(getDatapoint(1.0, new Date(2016, 10, 10, 9, 00, 00)));
    dpList.add(getDatapoint(2.0, new Date(2016, 10, 10, 13, 00, 00)));
    dpList.add(getDatapoint(3.0, new Date(2016, 10, 10, 17, 00, 00)));
    Double burnRate = AWSUtils.calculateAverageBurnRate(dpList);
    assertTrue("Received a null response", burnRate != null);
    assertTrue("BurnRate value is negative", burnRate > 0.0);
}
Project: photon-model    File: TestAWSUtils.java
@SuppressWarnings("deprecation")
@Test
public void testCurrentBurnRateCalculation() {
    List<Datapoint> dpList = new ArrayList<>();
    dpList.add(getDatapoint(1.0, new Date(2016, 10, 9, 5, 00, 00)));
    dpList.add(getDatapoint(2.0, new Date(2016, 10, 9, 9, 00, 00)));
    dpList.add(getDatapoint(3.0, new Date(2016, 10, 9, 13, 00, 00)));
    dpList.add(getDatapoint(4.0, new Date(2016, 10, 9, 17, 00, 00)));
    dpList.add(getDatapoint(5.0, new Date(2016, 10, 9, 21, 00, 00)));
    dpList.add(getDatapoint(6.0, new Date(2016, 10, 10, 1, 00, 00)));
    dpList.add(getDatapoint(7.0, new Date(2016, 10, 10, 5, 00, 00)));
    dpList.add(getDatapoint(8.0, new Date(2016, 10, 10, 9, 00, 00)));
    Double burnRate = AWSUtils.calculateCurrentBurnRate(dpList);
    assertTrue("Received a null response", burnRate != null);
    assertTrue("BurnRate value is negative", burnRate > 0.0);
}
Project: photon-model    File: TestAWSUtils.java
@SuppressWarnings("deprecation")
@Test
public void testCurrentBurnRateCalculationWithResetToZero() {
    List<Datapoint> dpList = new ArrayList<>();
    dpList.add(getDatapoint(4.0, new Date(2016, 10, 9, 13, 00, 00)));
    dpList.add(getDatapoint(5.0, new Date(2016, 10, 9, 17, 00, 00)));
    dpList.add(getDatapoint(6.0, new Date(2016, 10, 9, 21, 00, 00)));
    dpList.add(getDatapoint(7.0, new Date(2016, 10, 10, 1, 00, 00)));
    dpList.add(getDatapoint(0.0, new Date(2016, 10, 10, 5, 00, 00)));
    dpList.add(getDatapoint(1.0, new Date(2016, 10, 10, 9, 00, 00)));
    dpList.add(getDatapoint(2.0, new Date(2016, 10, 10, 13, 00, 00)));
    dpList.add(getDatapoint(3.0, new Date(2016, 10, 10, 17, 00, 00)));
    Double burnRate = AWSUtils.calculateCurrentBurnRate(dpList);
    assertTrue("Received a null response", burnRate != null);
    assertTrue("BurnRate value is negative", burnRate > 0.0);
}
Project: beam    File: SimplifiedKinesisClient.java
/**
 * Gets the total size in bytes of all events that remain in the Kinesis stream between the
 * specified instants.
 *
 * @return total size in bytes of all Kinesis events between the specified instants
 */
public long getBacklogBytes(final String streamName, final Instant countSince,
    final Instant countTo) throws TransientKinesisException {
  return wrapExceptions(new Callable<Long>() {

    @Override
    public Long call() throws Exception {
      Minutes period = Minutes.minutesBetween(countSince, countTo);
      if (period.isLessThan(Minutes.ONE)) {
        return 0L;
      }

      GetMetricStatisticsRequest request = createMetricStatisticsRequest(streamName,
          countSince, countTo, period);

      long totalSizeInBytes = 0;
      GetMetricStatisticsResult result = cloudWatch.getMetricStatistics(request);
      for (Datapoint point : result.getDatapoints()) {
        totalSizeInBytes += point
            .getSum()
            .longValue();
      }
      return totalSizeInBytes;
    }
  });
}
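The createMetricStatisticsRequest helper used above is not part of this excerpt. A plausible sketch, assuming the backlog is measured by summing the Kinesis IncomingBytes metric for the stream over the whole window; the namespace, metric name, and dimension below are assumptions for illustration, not taken from the Beam source:

// Sketch only: the real Beam helper may use a different metric or statistic.
private GetMetricStatisticsRequest createMetricStatisticsRequest(String streamName,
    Instant countSince, Instant countTo, Minutes period) {
  return new GetMetricStatisticsRequest()
      .withNamespace("AWS/Kinesis")          // assumed namespace
      .withMetricName("IncomingBytes")       // assumed metric
      .withDimensions(new Dimension().withName("StreamName").withValue(streamName))
      .withStatistics("Sum")                 // the caller reads getSum() on each Datapoint
      .withStartTime(countSince.toDate())
      .withEndTime(countTo.toDate())
      .withPeriod(period.getMinutes() * 60); // period in seconds, one datapoint per window
}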
Project: beam    File: SimplifiedKinesisClientTest.java
@Test
public void shouldCountBytesWhenSingleDataPointReturned() throws Exception {
  Instant countSince = new Instant("2017-04-06T10:00:00.000Z");
  Instant countTo = new Instant("2017-04-06T11:00:00.000Z");
  Minutes periodTime = Minutes.minutesBetween(countSince, countTo);
  GetMetricStatisticsRequest metricStatisticsRequest =
      underTest.createMetricStatisticsRequest(STREAM, countSince, countTo, periodTime);
  GetMetricStatisticsResult result = new GetMetricStatisticsResult()
      .withDatapoints(new Datapoint().withSum(1.0));

  given(cloudWatch.getMetricStatistics(metricStatisticsRequest)).willReturn(result);

  long backlogBytes = underTest.getBacklogBytes(STREAM, countSince, countTo);

  assertThat(backlogBytes).isEqualTo(1L);
}
Project: beam    File: SimplifiedKinesisClientTest.java
@Test
public void shouldCountBytesWhenMultipleDataPointsReturned() throws Exception {
  Instant countSince = new Instant("2017-04-06T10:00:00.000Z");
  Instant countTo = new Instant("2017-04-06T11:00:00.000Z");
  Minutes periodTime = Minutes.minutesBetween(countSince, countTo);
  GetMetricStatisticsRequest metricStatisticsRequest =
      underTest.createMetricStatisticsRequest(STREAM, countSince, countTo, periodTime);
  GetMetricStatisticsResult result = new GetMetricStatisticsResult()
      .withDatapoints(
          new Datapoint().withSum(1.0),
          new Datapoint().withSum(3.0),
          new Datapoint().withSum(2.0)
      );

  given(cloudWatch.getMetricStatistics(metricStatisticsRequest)).willReturn(result);

  long backlogBytes = underTest.getBacklogBytes(STREAM, countSince, countTo);

  assertThat(backlogBytes).isEqualTo(6L);
}
Project: cloud-runner    File: Datum.java
public Datum(Datapoint d, String metric, Statistic statistic) {
    switch (statistic) {
    case Sum:
        value = d.getSum();
        break;
    case Maximum:
        value = d.getMaximum();
        break;
    case Minimum:
        value = d.getMinimum();
        break;
    case SampleCount:
        value = d.getSampleCount();
        break;
    default:
        value = d.getAverage();
    }

    timestamp = d.getTimestamp().getTime();
    this.metric = metric;
    this.statistic = statistic.name();
    this.unit = d.getUnit();
}
Project: cloudwatch_exporter    File: CloudWatchCollectorTest.java
@Test
public void testAllStatistics() throws Exception {
  new CloudWatchCollector(
      "---\nregion: reg\nmetrics:\n- aws_namespace: AWS/ELB\n  aws_metric_name: RequestCount", client).register(registry);

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withAverage(1.0)
              .withMaximum(2.0).withMinimum(3.0).withSampleCount(4.0).withSum(5.0)));

  assertEquals(1.0, registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance"}, new String[]{"aws_elb", ""}), .01);
  assertEquals(2.0, registry.getSampleValue("aws_elb_request_count_maximum", new String[]{"job", "instance"}, new String[]{"aws_elb", ""}), .01);
  assertEquals(3.0, registry.getSampleValue("aws_elb_request_count_minimum", new String[]{"job", "instance"}, new String[]{"aws_elb", ""}), .01);
  assertEquals(4.0, registry.getSampleValue("aws_elb_request_count_sample_count", new String[]{"job", "instance"}, new String[]{"aws_elb", ""}), .01);
  assertEquals(5.0, registry.getSampleValue("aws_elb_request_count_sum", new String[]{"job", "instance"}, new String[]{"aws_elb", ""}), .01);
}
Project: cloudwatch_exporter    File: CloudWatchCollectorTest.java
@Test
public void testDimensions() throws Exception {
  new CloudWatchCollector(
      "---\nregion: reg\nmetrics:\n- aws_namespace: AWS/ELB\n  aws_metric_name: RequestCount\n  aws_dimensions:\n  - AvailabilityZone\n  - LoadBalancerName", client).register(registry);

  Mockito.when(client.listMetrics((ListMetricsRequest)argThat(
      new ListMetricsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimensions("AvailabilityZone", "LoadBalancerName"))))
      .thenReturn(new ListMetricsResult().withMetrics(
        new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("a"), new Dimension().withName("LoadBalancerName").withValue("myLB")),
        new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("a"), new Dimension().withName("LoadBalancerName").withValue("myLB"), new Dimension().withName("ThisExtraDimensionIsIgnored").withValue("dummy")),
        new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("b"), new Dimension().withName("LoadBalancerName").withValue("myOtherLB"))));

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimension("AvailabilityZone", "a").Dimension("LoadBalancerName", "myLB"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withAverage(2.0)));
  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimension("AvailabilityZone", "b").Dimension("LoadBalancerName", "myOtherLB"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withAverage(3.0)));

  assertEquals(2.0, registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name"}, new String[]{"aws_elb", "", "a", "myLB"}), .01);
  assertEquals(3.0, registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name"}, new String[]{"aws_elb", "", "b", "myOtherLB"}), .01);
  assertNull(registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name", "this_extra_dimension_is_ignored"}, new String[]{"aws_elb", "", "a", "myLB", "dummy"}));
}
Project: cloudwatch_exporter    File: CloudWatchCollectorTest.java
@Test
public void testDimensionSelect() throws Exception {
  new CloudWatchCollector(
      "---\nregion: reg\nmetrics:\n- aws_namespace: AWS/ELB\n  aws_metric_name: RequestCount\n  aws_dimensions:\n  - AvailabilityZone\n  - LoadBalancerName\n  aws_dimension_select:\n    LoadBalancerName:\n    - myLB", client).register(registry);
  Mockito.when(client.listMetrics((ListMetricsRequest)argThat(
      new ListMetricsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimensions("AvailabilityZone", "LoadBalancerName"))))
      .thenReturn(new ListMetricsResult().withMetrics(
          new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("a"), new Dimension().withName("LoadBalancerName").withValue("myLB")),
          new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("b"), new Dimension().withName("LoadBalancerName").withValue("myLB")),
          new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("a"), new Dimension().withName("LoadBalancerName").withValue("myOtherLB"))));

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimension("AvailabilityZone", "a").Dimension("LoadBalancerName", "myLB"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withAverage(2.0)));

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimension("AvailabilityZone", "b").Dimension("LoadBalancerName", "myLB"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withAverage(2.0)));

  assertEquals(2.0, registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name"}, new String[]{"aws_elb", "", "a", "myLB"}), .01);
  assertEquals(2.0, registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name"}, new String[]{"aws_elb", "", "b", "myLB"}), .01);
  assertNull(registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name"}, new String[]{"aws_elb", "", "a", "myOtherLB"}));
}
Project: cloudwatch_exporter    File: CloudWatchCollectorTest.java
@Test
public void testDimensionSelectRegex() throws Exception {
  new CloudWatchCollector(
      "---\nregion: reg\nmetrics:\n- aws_namespace: AWS/ELB\n  aws_metric_name: RequestCount\n  aws_dimensions:\n  - AvailabilityZone\n  - LoadBalancerName\n  aws_dimension_select_regex:\n    LoadBalancerName:\n    - myLB(.*)", client).register(registry);

  Mockito.when(client.listMetrics((ListMetricsRequest) argThat(
      new ListMetricsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimensions("AvailabilityZone", "LoadBalancerName"))))
      .thenReturn(new ListMetricsResult().withMetrics(
          new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("a"), new Dimension().withName("LoadBalancerName").withValue("myLB1")),
          new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("b"), new Dimension().withName("LoadBalancerName").withValue("myLB2")),
          new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("a"), new Dimension().withName("LoadBalancerName").withValue("myOtherLB"))));

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest) argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimension("AvailabilityZone", "a").Dimension("LoadBalancerName", "myLB1"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withAverage(2.0)));

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest) argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimension("AvailabilityZone", "b").Dimension("LoadBalancerName", "myLB2"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withAverage(2.0)));

  assertEquals(2.0, registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name"}, new String[]{"aws_elb", "", "a", "myLB1"}), .01);
  assertEquals(2.0, registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name"}, new String[]{"aws_elb", "", "b", "myLB2"}), .01);
  assertNull(registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name"}, new String[]{"aws_elb", "", "a", "myOtherLB"}));
}
Project: cloudwatch_exporter    File: CloudWatchCollectorTest.java
@Test
public void testGetDimensionsUsesNextToken() throws Exception {
  new CloudWatchCollector(
          "---\nregion: reg\nmetrics:\n- aws_namespace: AWS/ELB\n  aws_metric_name: RequestCount\n  aws_dimensions:\n  - AvailabilityZone\n  - LoadBalancerName\n  aws_dimension_select:\n    LoadBalancerName:\n    - myLB", client).register(registry);

  Mockito.when(client.listMetrics((ListMetricsRequest)argThat(
      new ListMetricsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimensions("AvailabilityZone", "LoadBalancerName"))))
      .thenReturn(new ListMetricsResult().withNextToken("ABC"));
  Mockito.when(client.listMetrics((ListMetricsRequest)argThat(
      new ListMetricsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimensions("AvailabilityZone", "LoadBalancerName").NextToken("ABC"))))
      .thenReturn(new ListMetricsResult().withMetrics(
          new Metric().withDimensions(new Dimension().withName("AvailabilityZone").withValue("a"), new Dimension().withName("LoadBalancerName").withValue("myLB"))));

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount").Dimension("AvailabilityZone", "a").Dimension("LoadBalancerName", "myLB"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withAverage(2.0)));

  assertEquals(2.0, registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance", "availability_zone", "load_balancer_name"}, new String[]{"aws_elb", "", "a", "myLB"}), .01);
}
Project: cloudwatch_exporter    File: CloudWatchCollectorTest.java
@Test
public void testExtendedStatistics() throws Exception {
  new CloudWatchCollector(
      "---\nregion: reg\nmetrics:\n- aws_namespace: AWS/ELB\n  aws_metric_name: Latency\n  aws_extended_statistics:\n  - p95\n  - p99.99", client).register(registry);

  HashMap<String, Double> extendedStatistics = new HashMap<String, Double>();
  extendedStatistics.put("p95", 1.0);
  extendedStatistics.put("p99.99", 2.0);

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("Latency"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withExtendedStatistics(extendedStatistics)));

  assertEquals(1.0, registry.getSampleValue("aws_elb_latency_p95", new String[]{"job", "instance"}, new String[]{"aws_elb", ""}), .01);
  assertEquals(2.0, registry.getSampleValue("aws_elb_latency_p99_99", new String[]{"job", "instance"}, new String[]{"aws_elb", ""}), .01);
}
Project: dwtc-extractor    File: Master.java
public void monitorCPUUsage() {
    AmazonCloudWatchClient cloudClient = new AmazonCloudWatchClient(
            getAwsCredentials());
    GetMetricStatisticsRequest request = new GetMetricStatisticsRequest();
    Calendar cal = Calendar.getInstance();
    request.setEndTime(cal.getTime());
    cal.add(Calendar.MINUTE, -5);
    request.setStartTime(cal.getTime());
    request.setNamespace("AWS/EC2");
    List<String> statistics = new ArrayList<String>();
    statistics.add("Maximium");
    statistics.add("Average");
    request.setStatistics(statistics);
    request.setMetricName("CPUUtilization");
    request.setPeriod(300);
    Dimension dimension = new Dimension();
    dimension.setName("InstanceId");
    dimension.setValue("i-d93fa2a4");
    List<Dimension> dimensions = new ArrayList<Dimension>();
    dimensions.add(dimension);
    request.setDimensions(dimensions);
    GetMetricStatisticsResult result = cloudClient
            .getMetricStatistics(request);
    List<Datapoint> dataPoints = result.getDatapoints();
    for (Datapoint dataPoint : dataPoints) {
        System.out.println(dataPoint.getAverage());
    }

}
Project: aws-mock    File: CloudWatchBaseTest.java
/**
 * Calls GetMetricStatistics and returns the first data point for the given metric.
 *
 * @return the first Datapoint, or null when no data points are returned
 */
protected final Datapoint getMetricStaticticsTest(String metricName) {
    Datapoint dataPoint = null;
    GetMetricStatisticsRequest request = new GetMetricStatisticsRequest();
    request.setStartTime(new DateTime().plusHours(HOURS).toDate());
    request.withNamespace(NAMESPACE);
    request.withPeriod(60 * 60);
    request.withMetricName(metricName);
    request.withStatistics("Average", "SampleCount");
    request.withEndTime(new Date());
    GetMetricStatisticsResult result = amazonCloudWatchClient.getMetricStatistics(request);
    if (result != null && !result.getDatapoints().isEmpty()) {
        dataPoint = result.getDatapoints().get(0);
    }

    return dataPoint;
}
Project: scouter-pulse-aws-monitor    File: GetStatisticsTask.java
private GetMetricStatisticsResult sort(List<Datapoint> resultSet) {
    Collections.sort(resultSet, new Comparator<Datapoint>() {
        @Override
        public int compare(Datapoint o1, Datapoint o2) {
            return o1.getTimestamp().compareTo(o2.getTimestamp());
        }
    });

    return new GetMetricStatisticsResult().withDatapoints(resultSet);
}
Project: photon-model    File: AWSStatsService.java
@Override
public void onSuccess(GetMetricStatisticsRequest request,
        GetMetricStatisticsResult result) {
    try {
        OperationContext.restoreOperationContext(this.opContext);
        List<ServiceStat> statDatapoints = new ArrayList<>();
        List<Datapoint> dpList = result.getDatapoints();
        if (dpList != null && dpList.size() != 0) {
            for (Datapoint dp : dpList) {
                ServiceStat stat = new ServiceStat();
                stat.latestValue = dp.getAverage();
                stat.unit = AWSStatsNormalizer.getNormalizedUnitValue(dp.getUnit());
                stat.sourceTimeMicrosUtc = TimeUnit.MILLISECONDS
                        .toMicros(dp.getTimestamp().getTime());
                statDatapoints.add(stat);
            }

            this.statsData.statsResponse.statValues
                    .put(AWSStatsNormalizer.getNormalizedStatKeyValue(result.getLabel()),
                            statDatapoints);
        }

        if (this.statsData.numResponses.incrementAndGet() == this.numOfMetrics) {
            sendStats(this.statsData);
        }
    } catch (Exception e) {
        this.statsData.taskManager.patchTaskToFailure(e);
    }
}
Project: photon-model    File: AWSUtils.java
/**
 * Calculate the average burn rate, given a list of datapoints from Amazon AWS.
 */
public static Double calculateAverageBurnRate(List<Datapoint> dpList) {
    if (dpList.size() <= 1) {
        return null;
    }
    Datapoint oldestDatapoint = dpList.get(0);
    Datapoint latestDatapoint = dpList.get(dpList.size() - 1);

    // Adjust the oldest datapoint to account for the billing cycle, when the estimated charges
    // are reset to 0.
    // Iterate over the sublist from the oldestDatapoint element + 1 to the latestDatapoint
    // element (exclusive).
    // If the oldestDatapoint value is greater than the latestDatapoint value,
    // move the oldestDatapoint pointer until the oldestDatapoint value is less than the
    // latestDatapoint value.
    // E.g.: 4,5,6,7,0,1,2,3 -> 4 is greater than 3. Move the pointer until 0.
    // The oldestDatapoint value is then 0 and the latestDatapoint value is 3.
    for (Datapoint datapoint : dpList.subList(1, dpList.size() - 1)) {
        if (latestDatapoint.getAverage() > oldestDatapoint.getAverage()) {
            break;
        }
        oldestDatapoint = datapoint;
    }

    double averageBurnRate = (latestDatapoint.getAverage()
            - oldestDatapoint.getAverage())
            / getDateDifference(oldestDatapoint.getTimestamp(),
                    latestDatapoint.getTimestamp(), TimeUnit.HOURS);
    // If there are only 2 datapoints and the oldestDatapoint is greater than the
    // latestDatapoint, value will be negative.
    // Eg: oldestDatapoint = 5 and latestDatapoint = 0, when the billing cycle is reset.
    // In such cases, set the burn rate value to 0
    averageBurnRate = (averageBurnRate < 0 ? 0 : averageBurnRate);
    return averageBurnRate;
}
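A hedged usage sketch of the billing-cycle reset case described in the comments above; the hourly spacing and the surrounding helper method are assumptions made for the example, and the imports match the other photon-model snippets on this page:

// Builds the 4,5,6,7,0,1,2,3 series from the comment, one datapoint per hour.
private static void averageBurnRateResetExample() {
    List<Datapoint> dpList = new ArrayList<>();
    double[] averages = { 4, 5, 6, 7, 0, 1, 2, 3 };
    long hourMillis = TimeUnit.HOURS.toMillis(1);
    long start = System.currentTimeMillis() - averages.length * hourMillis;
    for (int i = 0; i < averages.length; i++) {
        dpList.add(new Datapoint()
                .withAverage(averages[i])
                .withTimestamp(new Date(start + i * hourMillis)));
    }
    // The oldest pointer is advanced past the billing-cycle reset to the 0 value,
    // so the result is (3 - 0) / 3 hours = 1.0 per hour.
    Double burnRate = AWSUtils.calculateAverageBurnRate(dpList);
    System.out.println("average burn rate = " + burnRate);
}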
Project: photon-model    File: AWSUtils.java
/**
 * Calculate the current burn rate, given a list of datapoints from Amazon AWS.
 */
public static Double calculateCurrentBurnRate(List<Datapoint> dpList) {
    if (dpList.size() <= 7) {
        return null;
    }
    Datapoint dayOldDatapoint = dpList.get(dpList.size() - 7);
    Datapoint latestDatapoint = dpList.get(dpList.size() - 1);

    // Adjust the dayOldDatapoint to account for the billing cycle, when the estimated charges
    // are reset to 0.
    // Iterate over the sublist from the dayOldDatapoint element + 1 to the latestDatapoint
    // element.
    // If the dayOldDatapoint value is greater than the latestDatapoint value,
    // move the dayOldDatapoint pointer until the dayOldDatapoint value is less than the
    // latestDatapoint value.
    // E.g.: 4,5,6,7,0,1,2,3 -> 4 is greater than 3. Move the pointer until 0.
    // The dayOldDatapoint value is then 0 and the latestDatapoint value is 3.
    for (Datapoint datapoint : dpList.subList(dpList.size() - 6, dpList.size() - 1)) {
        if (latestDatapoint.getAverage() > dayOldDatapoint.getAverage()) {
            break;
        }
        dayOldDatapoint = datapoint;
    }

    double currentBurnRate = (latestDatapoint.getAverage()
            - dayOldDatapoint.getAverage())
            / getDateDifference(dayOldDatapoint.getTimestamp(),
                    latestDatapoint.getTimestamp(), TimeUnit.HOURS);
    // If there are only 2 datapoints and the oldestDatapoint is greater than the
    // latestDatapoint, value will be negative.
    // Eg: oldestDatapoint = 5 and latestDatapoint = 0, when the billing cycle is reset.
    // In such cases, set the burn rate value to 0
    currentBurnRate = (currentBurnRate < 0 ? 0 : currentBurnRate);
    return currentBurnRate;
}
Project: photon-model    File: TestAWSUtils.java
/**
 * Test that expects a null response since there aren't sufficient datapoints
 */
@Test
public void testAverageBurnRateCalculationExpectNull() {
    List<Datapoint> dpList = new ArrayList<>();
    dpList.add(new Datapoint());
    Double burnRate = AWSUtils.calculateAverageBurnRate(dpList);
    assertTrue("Received a non-null response", burnRate == null);
}
Project: photon-model    File: TestAWSUtils.java
/**
 * Test that expects a null response since there aren't sufficient datapoints
 */
@Test
public void testCurrentBurnRateCalculationExpectNull() {
    List<Datapoint> dpList = new ArrayList<>();
    dpList.add(new Datapoint());
    Double burnRate = AWSUtils.calculateCurrentBurnRate(dpList);
    assertTrue("Received a non-null response", burnRate == null);
}
Project: candlestack    File: CloudWatchAccessor.java
public void lookupAndSaveMetricData( CloudWatchMetric metric, String dimensionValue, String type ) throws CandlestackAWSException, CandlestackException {

        String datapointDateMapKey = getDatapointDateMapKey( metric, dimensionValue );

        // Determine the last time we fetched datapoints for this metric and dimension
        Date lastDatapointDate = lastDatapointDateMap.get( datapointDateMapKey );
        if ( lastDatapointDate == null ) {
            lastDatapointDate = metricsReaderWriter.readMostRecentMetricDate( type, dimensionValue, metric.getName() );
        }

        // Build the request and execute it
        GetMetricStatisticsRequest request = cloudWatchRequest( metric, dimensionValue, lastDatapointDate );
        GetMetricStatisticsResult result = cloudWatchClient.getMetricStatistics( request );

        // Sort the datapoints in chronological order
        List<Datapoint> datapoints = result.getDatapoints();
        datapoints.sort( new DatapointComparator() );

        // Write the data points
        for ( Datapoint datapoint : datapoints ) {

            // Only care about data points that have happened after the last one
            if ( lastDatapointDate == null || datapoint.getTimestamp().after( lastDatapointDate ) ) {
                lastDatapointDate = datapoint.getTimestamp();
                metricsReaderWriter.writeMetric( type, dimensionValue, datapoint.getTimestamp(), metric.getName(), metric.getStatistic().getValueFromDatapoint( datapoint ) );
            }

        }

        // Update the date map
        lastDatapointDateMap.put( datapointDateMapKey, lastDatapointDate );

    }
Project: cloud-runner    File: Datum.java
public static List<Datum> getAllData(List<Datapoint> datapoints, String metric, Statistic statistic) {
    ArrayList<Datum> data = new ArrayList<Datum>();
    for (Datapoint d : datapoints)
        data.add(new Datum(d, metric, statistic));
    Collections.sort(data);
    return data;
}
Project: cloudwatch_exporter    File: CloudWatchCollector.java
private Datapoint getNewestDatapoint(java.util.List<Datapoint> datapoints) {
  Datapoint newest = null;
  for (Datapoint d: datapoints) {
    if (newest == null || newest.getTimestamp().before(d.getTimestamp())) {
      newest = d;
    }
  }
  return newest;
}
Project: cloudwatch_exporter    File: CloudWatchCollectorTest.java
@Test
public void testUsesNewestDatapoint() throws Exception {
  new CloudWatchCollector(
      "---\nregion: reg\nmetrics:\n- aws_namespace: AWS/ELB\n  aws_metric_name: RequestCount", client).register(registry);

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/ELB").MetricName("RequestCount"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date(1)).withAverage(1.0),
          new Datapoint().withTimestamp(new Date(3)).withAverage(3.0),
          new Datapoint().withTimestamp(new Date(2)).withAverage(2.0)));

  assertEquals(3.0, registry.getSampleValue("aws_elb_request_count_average", new String[]{"job", "instance"}, new String[]{"aws_elb", ""}), .01);
}
Project: cloudwatch_exporter    File: CloudWatchCollectorTest.java
@Test
public void testDynamoIndexDimensions() throws Exception {
  new CloudWatchCollector(
      "---\nregion: reg\nmetrics:\n- aws_namespace: AWS/DynamoDB\n  aws_metric_name: ConsumedReadCapacityUnits\n  aws_dimensions:\n  - TableName\n  - GlobalSecondaryIndexName\n- aws_namespace: AWS/DynamoDB\n  aws_metric_name: OnlineIndexConsumedWriteCapacity\n  aws_dimensions:\n  - TableName\n  - GlobalSecondaryIndexName\n- aws_namespace: AWS/DynamoDB\n  aws_metric_name: ConsumedReadCapacityUnits\n  aws_dimensions:\n  - TableName", client).register(registry);
  Mockito.when(client.listMetrics((ListMetricsRequest)argThat(
      new ListMetricsRequestMatcher().Namespace("AWS/DynamoDB").MetricName("ConsumedReadCapacityUnits").Dimensions("TableName", "GlobalSecondaryIndexName"))))
      .thenReturn(new ListMetricsResult().withMetrics(
        new Metric().withDimensions(new Dimension().withName("TableName").withValue("myTable"), new Dimension().withName("GlobalSecondaryIndexName").withValue("myIndex"))));
  Mockito.when(client.listMetrics((ListMetricsRequest)argThat(
      new ListMetricsRequestMatcher().Namespace("AWS/DynamoDB").MetricName("OnlineIndexConsumedWriteCapacity").Dimensions("TableName", "GlobalSecondaryIndexName"))))
      .thenReturn(new ListMetricsResult().withMetrics(
        new Metric().withDimensions(new Dimension().withName("TableName").withValue("myTable"), new Dimension().withName("GlobalSecondaryIndexName").withValue("myIndex"))));
  Mockito.when(client.listMetrics((ListMetricsRequest)argThat(
      new ListMetricsRequestMatcher().Namespace("AWS/DynamoDB").MetricName("ConsumedReadCapacityUnits").Dimensions("TableName"))))
      .thenReturn(new ListMetricsResult().withMetrics(
        new Metric().withDimensions(new Dimension().withName("TableName").withValue("myTable"))));

  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/DynamoDB").MetricName("ConsumedReadCapacityUnits").Dimension("TableName", "myTable").Dimension("GlobalSecondaryIndexName", "myIndex"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withSum(1.0)));
  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/DynamoDB").MetricName("OnlineIndexConsumedWriteCapacity").Dimension("TableName", "myTable").Dimension("GlobalSecondaryIndexName", "myIndex"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withSum(2.0)));
  Mockito.when(client.getMetricStatistics((GetMetricStatisticsRequest)argThat(
      new GetMetricStatisticsRequestMatcher().Namespace("AWS/DynamoDB").MetricName("ConsumedReadCapacityUnits").Dimension("TableName", "myTable"))))
      .thenReturn(new GetMetricStatisticsResult().withDatapoints(
          new Datapoint().withTimestamp(new Date()).withSum(3.0)));

  assertEquals(1.0, registry.getSampleValue("aws_dynamodb_consumed_read_capacity_units_index_sum", new String[]{"job", "instance", "table_name", "global_secondary_index_name"}, new String[]{"aws_dynamodb", "", "myTable", "myIndex"}), .01);
  assertEquals(2.0, registry.getSampleValue("aws_dynamodb_online_index_consumed_write_capacity_sum", new String[]{"job", "instance", "table_name", "global_secondary_index_name"}, new String[]{"aws_dynamodb", "", "myTable", "myIndex"}), .01);
  assertEquals(3.0, registry.getSampleValue("aws_dynamodb_consumed_read_capacity_units_sum", new String[]{"job", "instance", "table_name"}, new String[]{"aws_dynamodb", "", "myTable"}), .01);
}
Project: aws-mock    File: GetMetricStaticticsCloudwatchTest.java
/**
 * Test GetMetricStatistics for CPUUtilization.
 */
@Test(timeout = TIMEOUT_LEVEL1)
public final void GetMetricStaticticsTest() {
    log.info("Start GetMetricStatictics Cloudwatch test");

    Datapoint dataPoint = getMetricStaticticsTest(Constants.CPU_UTILIZATION);

    Assert.assertNotNull("data point should not be null", dataPoint);
    Assert.assertNotNull("average should not be null", dataPoint.getAverage());
    Assert.assertNotNull("sample count should not be null", dataPoint.getSampleCount());
}
Project: aws-mock    File: GetMetricStaticticsCloudwatchTest.java
/**
 * Test GetMetricStatistics for Disk Read bytes.
 */
@Test(timeout = TIMEOUT_LEVEL1)
public final void GetMetricStaticticsTestForDiskReadBytes() {
    log.info("Start GetMetricStatictics Cloudwatch test");

    Datapoint dataPoint = getMetricStaticticsTest(Constants.DISK_READ_BYTES);

    Assert.assertNotNull("data point should not be null", dataPoint);
    Assert.assertNotNull("average should not be null", dataPoint.getAverage());
    Assert.assertNotNull("sample count should not be null", dataPoint.getSampleCount());
}
Project: aws-mock    File: GetMetricStaticticsCloudwatchTest.java
/**
 * Test GetMetricStatistics for Disk Write bytes.
 */
@Test(timeout = TIMEOUT_LEVEL1)
public final void GetMetricStaticticsTestForDiskWriteBytes() {
    log.info("Start GetMetricStatictics Cloudwatch test");

    Datapoint dataPoint = getMetricStaticticsTest(Constants.DISK_WRITE_BYTES);

    Assert.assertNotNull("data point should not be null", dataPoint);
    Assert.assertNotNull("average should not be null", dataPoint.getAverage());
    Assert.assertNotNull("sample count should not be null", dataPoint.getSampleCount());
}
Project: aws-mock    File: GetMetricStaticticsCloudwatchTest.java
/**
 * Test GetMetricStatistics for Disk Read Ops.
 */
@Test(timeout = TIMEOUT_LEVEL1)
public final void GetMetricStaticticsTestForDiskReadOps() {
    log.info("Start GetMetricStatictics Cloudwatch test");

    Datapoint dataPoint = getMetricStaticticsTest(Constants.DISK_READ_OPS);

    Assert.assertNotNull("data point should not be null", dataPoint);
    Assert.assertNotNull("average should not be null", dataPoint.getAverage());
    Assert.assertNotNull("sample count should not be null", dataPoint.getSampleCount());
}
Project: aws-mock    File: GetMetricStaticticsCloudwatchTest.java
/**
 * Test GetMetricStatistics for Disk Write Ops.
 */
@Test(timeout = TIMEOUT_LEVEL1)
public final void GetMetricStaticticsTestForDiskWriteOps() {
    log.info("Start GetMetricStatictics Cloudwatch test");

    Datapoint dataPoint = getMetricStaticticsTest(Constants.DISK_WRITE_OPS);

    Assert.assertNotNull("data point should not be null", dataPoint);
    Assert.assertNotNull("average should not be null", dataPoint.getAverage());
    Assert.assertNotNull("sample count should not be null", dataPoint.getSampleCount());
}
Project: aws-mock    File: GetMetricStaticticsCloudwatchTest.java
/**
 * Test GetMetricStatistics for Network In.
 */
@Test(timeout = TIMEOUT_LEVEL1)
public final void GetMetricStaticticsTestForNetworkIn() {
    log.info("Start GetMetricStatictics Cloudwatch test");

    Datapoint dataPoint = getMetricStaticticsTest(Constants.NETWORK_IN);

    Assert.assertNotNull("data point should not be null", dataPoint);
    Assert.assertNotNull("average should not be null", dataPoint.getAverage());
    Assert.assertNotNull("sample count should not be null", dataPoint.getSampleCount());
}
Project: aws-mock    File: GetMetricStaticticsCloudwatchTest.java
/**
 * Test GetMetricStatistics for Network Out.
 */
@Test(timeout = TIMEOUT_LEVEL1)
public final void GetMetricStaticticsTestForNetworkOut() {
    log.info("Start GetMetricStatictics Cloudwatch test");

    Datapoint dataPoint = getMetricStaticticsTest(Constants.NETWORK_OUT);

    Assert.assertNotNull("data point should not be null", dataPoint);
    Assert.assertNotNull("average should not be null", dataPoint.getAverage());
    Assert.assertNotNull("sample count should not be null", dataPoint.getSampleCount());
}
Project: photon-model    File: AWSStatsService.java
@Override
public void onSuccess(GetMetricStatisticsRequest request,
        GetMetricStatisticsResult result) {
    try {
        OperationContext.restoreOperationContext(this.opContext);
        List<Datapoint> dpList = result.getDatapoints();
        // Sort the data points in increasing order of timestamp to calculate Burn rate
        Collections
                .sort(dpList, (o1, o2) -> o1.getTimestamp().compareTo(o2.getTimestamp()));

        List<ServiceStat> estimatedChargesDatapoints = new ArrayList<>();
        if (dpList != null && dpList.size() != 0) {
            for (Datapoint dp : dpList) {
                // If the datapoint collected is older than the last collection time, skip it.
                if (this.lastCollectionTimeMicrosUtc != null &&
                        TimeUnit.MILLISECONDS.toMicros(dp.getTimestamp()
                                .getTime())
                                <= this.lastCollectionTimeMicrosUtc) {
                    continue;
                }

                // If there is no lastCollectionTime or the datapoint collected in newer
                // than the lastCollectionTime, push it.
                ServiceStat stat = new ServiceStat();
                stat.latestValue = dp.getAverage();
                stat.unit = AWSStatsNormalizer
                        .getNormalizedUnitValue(DIMENSION_CURRENCY_VALUE);
                stat.sourceTimeMicrosUtc = TimeUnit.MILLISECONDS
                        .toMicros(dp.getTimestamp().getTime());
                estimatedChargesDatapoints.add(stat);
            }

            this.statsData.statsResponse.statValues.put(
                    AWSStatsNormalizer.getNormalizedStatKeyValue(result.getLabel()),
                    estimatedChargesDatapoints);

            // Calculate average burn rate only if there is more than 1 datapoint available.
            // This will ensure that NaN errors will not occur.
            if (dpList.size() > 1) {
                ServiceStat averageBurnRate = new ServiceStat();
                averageBurnRate.latestValue = AWSUtils.calculateAverageBurnRate(dpList);
                averageBurnRate.unit = AWSStatsNormalizer
                        .getNormalizedUnitValue(DIMENSION_CURRENCY_VALUE);
                averageBurnRate.sourceTimeMicrosUtc = Utils.getSystemNowMicrosUtc();
                this.statsData.statsResponse.statValues.put(
                        AWSStatsNormalizer
                                .getNormalizedStatKeyValue(AWSConstants.AVERAGE_BURN_RATE),
                        Collections.singletonList(averageBurnRate));
            }

            // Calculate current burn rate only if there is more than 1 day worth of data available.
            if (dpList.size() > NUM_OF_COST_DATAPOINTS_IN_A_DAY) {
                ServiceStat currentBurnRate = new ServiceStat();
                currentBurnRate.latestValue = AWSUtils.calculateCurrentBurnRate(dpList);
                currentBurnRate.unit = AWSStatsNormalizer
                        .getNormalizedUnitValue(DIMENSION_CURRENCY_VALUE);
                currentBurnRate.sourceTimeMicrosUtc = Utils.getSystemNowMicrosUtc();
                this.statsData.statsResponse.statValues.put(
                        AWSStatsNormalizer
                                .getNormalizedStatKeyValue(AWSConstants.CURRENT_BURN_RATE),
                        Collections.singletonList(currentBurnRate));
            }
        }
        sendStats(this.statsData);
    } catch (Exception e) {
        this.statsData.taskManager.patchTaskToFailure(e);
    }
}
Project: photon-model    File: TestAWSUtils.java
private Datapoint getDatapoint(Double average, Date timestamp) {
    Datapoint dp = new Datapoint();
    dp.setAverage(average);
    dp.setTimestamp(timestamp);
    return dp;
}
Project: candlestack    File: CloudWatchAccessor.java
@Override
public int compare( Datapoint datapoint1, Datapoint datapoint2 ) {
    return datapoint1.getTimestamp().compareTo( datapoint2.getTimestamp() );
}
Project: stratos    File: AWSHelper.java
/**
 * Retrieves the total number of requests that were made to the load
 * balancer during the given time interval in the past
 *
 * @param loadBalancerName
 * @param region
 * @param timeInterval     in seconds, which must be a multiple of 60
 * @return number of requests made
 */
public int getRequestCount(String loadBalancerName, String region,
                           int timeInterval) {
    int count = 0;

    try {
        GetMetricStatisticsRequest request = new GetMetricStatisticsRequest();
        request.setMetricName(Constants.REQUEST_COUNT_METRIC_NAME);
        request.setNamespace(Constants.CLOUD_WATCH_NAMESPACE_NAME);

        Date currentTime = new DateTime(DateTimeZone.UTC).toDate();
        Date pastTime = new DateTime(DateTimeZone.UTC).minusSeconds(
                timeInterval).toDate();

        request.setStartTime(pastTime);
        request.setEndTime(currentTime);

        request.setPeriod(timeInterval);

        HashSet<String> statistics = new HashSet<String>();
        statistics.add(Constants.SUM_STATISTICS_NAME);
        request.setStatistics(statistics);

        HashSet<Dimension> dimensions = new HashSet<Dimension>();
        Dimension loadBalancerDimension = new Dimension();
        loadBalancerDimension
                .setName(Constants.LOAD_BALANCER_DIMENTION_NAME);
        loadBalancerDimension.setValue(loadBalancerName);
        dimensions.add(loadBalancerDimension);
        request.setDimensions(dimensions);

        cloudWatchClient.setEndpoint(String.format(
                Constants.CLOUD_WATCH_ENDPOINT_URL_FORMAT, region));

        GetMetricStatisticsResult result = cloudWatchClient
                .getMetricStatistics(request);

        List<Datapoint> dataPoints = result.getDatapoints();

        if (dataPoints != null && dataPoints.size() > 0) {
            count = dataPoints.get(0).getSum().intValue();
        }

    } catch (AmazonClientException e) {
        log.error(
                "Could not get request count statistics of load balancer "
                        + loadBalancerName, e);
    }

    return count;
}
Project: stratos    File: AWSHelper.java
/**
 * Retrieves the number of responses generated for a particular response
 * code like 2XX, 3XX, 4XX, 5XX
 *
 * @param loadBalancerName
 * @param region
 * @param metricName       one of HTTPCode_Backend_2XX,
 *                         HTTPCode_Backend_3XX, HTTPCode_Backend_4XX, or
 *                         HTTPCode_Backend_5XX
 * @param startTime        of the window to be scanned
 * @param endTime          of the window to be scanned
 * @param timeInterval     in seconds
 * @return number of responses for this metric
 */
public int getResponseCountForMetric(String loadBalancerName,
                                     String region, String metricName, Date startTime, Date endTime,
                                     int timeInterval) {
    int count = 0;

    try {
        GetMetricStatisticsRequest request = new GetMetricStatisticsRequest();
        request.setMetricName(metricName);
        request.setNamespace(Constants.CLOUD_WATCH_NAMESPACE_NAME);

        request.setStartTime(startTime);
        request.setEndTime(endTime);

        request.setPeriod(timeInterval);

        HashSet<String> statistics = new HashSet<String>();
        statistics.add(Constants.SUM_STATISTICS_NAME);
        request.setStatistics(statistics);

        HashSet<Dimension> dimensions = new HashSet<Dimension>();
        Dimension loadBalancerDimension = new Dimension();
        loadBalancerDimension
                .setName(Constants.LOAD_BALANCER_DIMENTION_NAME);
        loadBalancerDimension.setValue(loadBalancerName);
        dimensions.add(loadBalancerDimension);
        request.setDimensions(dimensions);

        cloudWatchClient.setEndpoint(String.format(
                Constants.CLOUD_WATCH_ENDPOINT_URL_FORMAT, region));

        GetMetricStatisticsResult result = cloudWatchClient
                .getMetricStatistics(request);

        List<Datapoint> dataPoints = result.getDatapoints();

        if (dataPoints != null && dataPoints.size() > 0) {
            count = dataPoints.get(0).getSum().intValue();
        }

    } catch (AmazonClientException e) {
        log.error("Could not get the statistics for metric " + metricName
                + " of load balancer " + loadBalancerName, e);
    }

    return count;
}