Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add StorageResolution config to CloudWatch output #6689

Merged
merged 1 commit into from
Nov 22, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion plugins/outputs/cloudwatch/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,4 +45,7 @@ also save AWS API cost. If this flag is enabled, this plugin will parse the required
[CloudWatch statistic fields](https://docs.aws.amazon.com/sdk-for-go/api/service/cloudwatch/#StatisticSet)
(count, min, max, and sum) and send them to CloudWatch. You could use `basicstats`
aggregator to calculate those fields. If not all statistic fields are available,
all fields would still be sent as raw metrics.
all fields would still be sent as raw metrics.

## high_resolution_metrics

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this is missing one dash, to make it nest under the Config section?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

True, I'm gonna make a new PR to fix it.

Enable high-resolution metrics (1-second precision) instead of standard ones (60-second precision)
60 changes: 37 additions & 23 deletions plugins/outputs/cloudwatch/cloudwatch.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,9 @@ type CloudWatch struct {
Token string `toml:"token"`
EndpointURL string `toml:"endpoint_url"`

Namespace string `toml:"namespace"` // CloudWatch Metrics Namespace
svc *cloudwatch.CloudWatch
Namespace string `toml:"namespace"` // CloudWatch Metrics Namespace
HighResolutionMetrics bool `toml:"high_resolution_metrics"`
svc *cloudwatch.CloudWatch

WriteStatistics bool `toml:"write_statistics"`
}
Expand All @@ -47,11 +48,12 @@ type cloudwatchField interface {
}

// statisticField accumulates the statistic components (min, max, sum, count)
// of one telegraf field so they can be emitted as a single CloudWatch
// StatisticSet datum.
//
// NOTE(review): the diff view duplicated the pre-change field list here;
// this is the merged post-change declaration.
type statisticField struct {
	metricName        string                    // telegraf metric name (first part of the datum name)
	fieldName         string                    // field name (joined to metricName with "_")
	tags              map[string]string         // converted to CloudWatch dimensions
	values            map[statisticType]float64 // collected statistic values keyed by statistic type
	timestamp         time.Time
	storageResolution int64 // 1 for high-resolution metrics, 60 for standard resolution
}

func (f *statisticField) addValue(sType statisticType, value float64) {
Expand Down Expand Up @@ -81,6 +83,7 @@ func (f *statisticField) buildDatum() []*cloudwatch.MetricDatum {
Sum: aws.Float64(sum),
SampleCount: aws.Float64(count),
},
StorageResolution: aws.Int64(f.storageResolution),
}

datums = append(datums, datum)
Expand Down Expand Up @@ -126,11 +129,12 @@ func (f *statisticField) hasAllFields() bool {
}

type valueField struct {
metricName string
fieldName string
tags map[string]string
value float64
timestamp time.Time
metricName string
fieldName string
tags map[string]string
value float64
timestamp time.Time
storageResolution int64
}

func (f *valueField) addValue(sType statisticType, value float64) {
Expand All @@ -143,10 +147,11 @@ func (f *valueField) buildDatum() []*cloudwatch.MetricDatum {

return []*cloudwatch.MetricDatum{
{
MetricName: aws.String(strings.Join([]string{f.metricName, f.fieldName}, "_")),
Value: aws.Float64(f.value),
Dimensions: BuildDimensions(f.tags),
Timestamp: aws.Time(f.timestamp),
MetricName: aws.String(strings.Join([]string{f.metricName, f.fieldName}, "_")),
Value: aws.Float64(f.value),
Dimensions: BuildDimensions(f.tags),
Timestamp: aws.Time(f.timestamp),
StorageResolution: aws.Int64(f.storageResolution),
},
}
}
Expand Down Expand Up @@ -186,6 +191,9 @@ var sampleConfig = `
## You could use basicstats aggregator to calculate those fields. If not all statistic
## fields are available, all fields would still be sent as raw metrics.
# write_statistics = false

## Enable high resolution metrics of 1 second (standard resolution metrics are 60 seconds)
## high_resolution_metrics = false
`

func (c *CloudWatch) SampleConfig() string {
Expand Down Expand Up @@ -220,7 +228,7 @@ func (c *CloudWatch) Write(metrics []telegraf.Metric) error {

var datums []*cloudwatch.MetricDatum
for _, m := range metrics {
d := BuildMetricDatum(c.WriteStatistics, m)
d := BuildMetricDatum(c.WriteStatistics, c.HighResolutionMetrics, m)
datums = append(datums, d...)
}

Expand Down Expand Up @@ -278,10 +286,14 @@ func PartitionDatums(size int, datums []*cloudwatch.MetricDatum) [][]*cloudwatch
// Make a MetricDatum from telegraf.Metric. It would check if all required fields of
// cloudwatch.StatisticSet are available. If so, it would build MetricDatum from statistic values.
// Otherwise, fields would still be built independently.
func BuildMetricDatum(buildStatistic bool, point telegraf.Metric) []*cloudwatch.MetricDatum {
func BuildMetricDatum(buildStatistic bool, highResolutionMetrics bool, point telegraf.Metric) []*cloudwatch.MetricDatum {

fields := make(map[string]cloudwatchField)
tags := point.Tags()
storageResolution := int64(60)
if highResolutionMetrics {
storageResolution = 1
}

for k, v := range point.Fields() {

Expand All @@ -297,11 +309,12 @@ func BuildMetricDatum(buildStatistic bool, point telegraf.Metric) []*cloudwatch.
// If statistic metric is not enabled or non-statistic type, just take current field as a value field.
if !buildStatistic || sType == statisticTypeNone {
fields[k] = &valueField{
metricName: point.Name(),
fieldName: k,
tags: tags,
timestamp: point.Time(),
value: val,
metricName: point.Name(),
fieldName: k,
tags: tags,
timestamp: point.Time(),
value: val,
storageResolution: storageResolution,
}
continue
}
Expand All @@ -317,6 +330,7 @@ func BuildMetricDatum(buildStatistic bool, point telegraf.Metric) []*cloudwatch.
values: map[statisticType]float64{
sType: val,
},
storageResolution: storageResolution,
}
} else {
// Add new statistic value to this field
Expand Down
29 changes: 23 additions & 6 deletions plugins/outputs/cloudwatch/cloudwatch_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -75,11 +75,11 @@ func TestBuildMetricDatums(t *testing.T) {
testutil.TestMetric(float64(1.174272e+108)), // largest should be 1.174271e+108
}
for _, point := range validMetrics {
datums := BuildMetricDatum(false, point)
datums := BuildMetricDatum(false, false, point)
assert.Equal(1, len(datums), fmt.Sprintf("Valid point should create a Datum {value: %v}", point))
}
for _, point := range invalidMetrics {
datums := BuildMetricDatum(false, point)
datums := BuildMetricDatum(false, false, point)
assert.Equal(0, len(datums), fmt.Sprintf("Valid point should not create a Datum {value: %v}", point))
}

Expand All @@ -89,7 +89,7 @@ func TestBuildMetricDatums(t *testing.T) {
map[string]interface{}{"value_max": float64(10), "value_min": float64(0), "value_sum": float64(100), "value_count": float64(20)},
time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
)
datums := BuildMetricDatum(true, statisticMetric)
datums := BuildMetricDatum(true, false, statisticMetric)
assert.Equal(1, len(datums), fmt.Sprintf("Valid point should create a Datum {value: %v}", statisticMetric))

multiFieldsMetric, _ := metric.New(
Expand All @@ -98,7 +98,7 @@ func TestBuildMetricDatums(t *testing.T) {
map[string]interface{}{"valueA": float64(10), "valueB": float64(0), "valueC": float64(100), "valueD": float64(20)},
time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
)
datums = BuildMetricDatum(true, multiFieldsMetric)
datums = BuildMetricDatum(true, false, multiFieldsMetric)
assert.Equal(4, len(datums), fmt.Sprintf("Each field should create a Datum {value: %v}", multiFieldsMetric))

multiStatisticMetric, _ := metric.New(
Expand All @@ -112,10 +112,27 @@ func TestBuildMetricDatums(t *testing.T) {
},
time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
)
datums = BuildMetricDatum(true, multiStatisticMetric)
datums = BuildMetricDatum(true, false, multiStatisticMetric)
assert.Equal(7, len(datums), fmt.Sprintf("Valid point should create a Datum {value: %v}", multiStatisticMetric))
}

// TestMetricDatumResolution verifies that BuildMetricDatum stamps datums with
// a 60-second storage resolution by default and a 1-second resolution when
// high-resolution metrics are enabled.
func TestMetricDatumResolution(t *testing.T) {
	const (
		standardResolution = int64(60)
		highResolution     = int64(1)
	)

	assert := assert.New(t)

	input := testutil.TestMetric(1)

	datums := BuildMetricDatum(false, false, input)
	assert.Equal(standardResolution, *datums[0].StorageResolution)

	datums = BuildMetricDatum(false, true, input)
	assert.Equal(highResolution, *datums[0].StorageResolution)
}

func TestBuildMetricDatums_SkipEmptyTags(t *testing.T) {
input := testutil.MustMetric(
"cpu",
Expand All @@ -129,7 +146,7 @@ func TestBuildMetricDatums_SkipEmptyTags(t *testing.T) {
time.Unix(0, 0),
)

datums := BuildMetricDatum(true, input)
datums := BuildMetricDatum(true, false, input)
require.Len(t, datums[0].Dimensions, 1)
}

Expand Down