// Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2016-2020 Datadog, Inc.

package aggregator

import (
	"math"
	"time"

	"github.com/DataDog/datadog-agent/pkg/aggregator/ckey"
	"github.com/DataDog/datadog-agent/pkg/metrics"
	"github.com/DataDog/datadog-agent/pkg/util/log"
)

const checksSourceTypeName = "System"

// CheckSampler aggregates metrics from one Check instance
type CheckSampler struct {
	series          []*metrics.Serie
	sketches        []metrics.SketchSeries
	contextResolver *ContextResolver
	metrics         metrics.ContextMetrics
	sketchMap       sketchMap
	lastBucketValue map[ckey.ContextKey]int64
	lastSeenBucket  map[ckey.ContextKey]time.Time
	bucketExpiry    time.Duration
}

// newCheckSampler returns a newly initialized CheckSampler
func newCheckSampler() *CheckSampler {
	return &CheckSampler{
		series:          make([]*metrics.Serie, 0),
		sketches:        make([]metrics.SketchSeries, 0),
		contextResolver: newContextResolver(),
		metrics:         metrics.MakeContextMetrics(),
		sketchMap:       make(sketchMap),
		lastBucketValue: make(map[ckey.ContextKey]int64),
		lastSeenBucket:  make(map[ckey.ContextKey]time.Time),
		bucketExpiry:    1 * time.Minute,
	}
}

func (cs *CheckSampler) addSample(metricSample *metrics.MetricSample) {
	contextKey := cs.contextResolver.trackContext(metricSample, metricSample.Timestamp)

	// log.Debugf (not log.Debug) is needed here: the message uses format verbs.
	if err := cs.metrics.AddSample(contextKey, metricSample, metricSample.Timestamp, 1); err != nil {
		log.Debugf("Ignoring sample '%s' on host '%s' and tags '%s': %s", metricSample.Name, metricSample.Host, metricSample.Tags, err)
	}
}
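
// Illustrative call, not part of the original file: a check would typically
// feed addSample with something like the following (names and field values
// are hypothetical):
//
//	cs.addSample(&metrics.MetricSample{
//		Name:      "hypothetical.requests",
//		Value:     1.0,
//		Mtype:     metrics.GaugeType,
//		Tags:      []string{"env:dev"},
//		Timestamp: float64(time.Now().Unix()),
//	})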

func (cs *CheckSampler) newSketchSeries(ck ckey.ContextKey, points []metrics.SketchPoint) metrics.SketchSeries {
	ctx := cs.contextResolver.contextsByKey[ck]
	ss := metrics.SketchSeries{
		Name: ctx.Name,
		Tags: ctx.Tags,
		Host: ctx.Host,
		// Interval: TODO: investigate
		Points:     points,
		ContextKey: ck,
	}

	return ss
}

func (cs *CheckSampler) addBucket(bucket *metrics.HistogramBucket) {
	if bucket.Value < 0 {
		log.Warnf("Negative bucket value %d for metric %s, discarding it", bucket.Value, bucket.Name)
		return
	}
	if bucket.Value == 0 {
		// noop
		return
	}

	bucketRange := bucket.UpperBound - bucket.LowerBound
	if bucketRange < 0 {
		log.Warnf(
			"Negative bucket range [%f-%f] for metric %s, discarding it",
			bucket.LowerBound, bucket.UpperBound, bucket.Name,
		)
		return
	}

	contextKey := cs.contextResolver.trackContext(bucket, bucket.Timestamp)

	// if the bucket is monotonic and we have already seen it, only send the delta
	if bucket.Monotonic {
		lastBucketValue, bucketFound := cs.lastBucketValue[contextKey]
		rawValue := bucket.Value
		if bucketFound {
			bucket.Value = rawValue - lastBucketValue
		}
		cs.lastBucketValue[contextKey] = rawValue
		cs.lastSeenBucket[contextKey] = time.Now()
	}

	if bucket.Value < 0 {
		log.Warnf("Negative bucket delta %d for metric %s, discarding it", bucket.Value, bucket.Name)
		return
	}
	if bucket.Value == 0 {
		// noop
		return
	}

	// "if the quantile falls into the highest bucket, the upper bound of the 2nd
	// highest bucket is returned" (quoting the Prometheus histogram_quantile
	// documentation), so the +Inf bucket is collapsed onto its lower bound.
	if math.IsInf(bucket.UpperBound, 1) {
		cs.sketchMap.insertInterp(int64(bucket.Timestamp), contextKey, bucket.LowerBound, bucket.LowerBound, uint(bucket.Value))
		return
	}

	log.Tracef(
		"Interpolating %d values over the [%f-%f] bucket",
		bucket.Value, bucket.LowerBound, bucket.UpperBound,
	)
	cs.sketchMap.insertInterp(int64(bucket.Timestamp), contextKey, bucket.LowerBound, bucket.UpperBound, uint(bucket.Value))
}
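
// Worked example of the monotonic-delta handling above (hypothetical metric
// name and values), sketched as a caller would exercise it:
//
//	b := &metrics.HistogramBucket{
//		Name: "hypothetical.latency", LowerBound: 0, UpperBound: 0.1,
//		Value: 10, Monotonic: true, Timestamp: ts,
//	}
//	cs.addBucket(b) // first sighting: all 10 values are interpolated
//	b2 := *b
//	b2.Value = 14
//	cs.addBucket(&b2) // later sighting: only the delta of 4 is inserted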

func (cs *CheckSampler) commitSeries(timestamp float64) {
	series, errors := cs.metrics.Flush(timestamp)
	// note: the loop variable is named ctxKey to avoid shadowing the ckey package
	for ctxKey, err := range errors {
		context, ok := cs.contextResolver.contextsByKey[ctxKey]
		if !ok {
			log.Errorf("Can't resolve context of error '%s': inconsistent context resolver state: context with key '%v' is not tracked", err, ctxKey)
			continue
		}
		log.Infof("No value returned for check metric '%s' on host '%s' and tags '%s': %s", context.Name, context.Host, context.Tags, err)
	}
	for _, serie := range series {
		// Resolve context and populate new []Serie
		context, ok := cs.contextResolver.contextsByKey[serie.ContextKey]
		if !ok {
			log.Errorf("Ignoring all metrics on context key '%v': inconsistent context resolver state: the context is not tracked", serie.ContextKey)
			continue
		}
		serie.Name = context.Name + serie.NameSuffix
		serie.Tags = context.Tags
		serie.Host = context.Host
		serie.SourceTypeName = checksSourceTypeName // this source type is required for metrics coming from the checks

		cs.series = append(cs.series, serie)
	}
}
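
// Note on NameSuffix in commitSeries above: ContextMetrics.Flush returns one
// Serie per aggregate of a context, with NameSuffix carrying that aggregate's
// suffix. For a histogram context named "hypothetical.latency" this would
// yield series such as "hypothetical.latency.avg" or
// "hypothetical.latency.count" (the exact suffixes depend on the configured
// histogram aggregates; the names here are illustrative).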

func (cs *CheckSampler) commitSketches(timestamp float64) {
	pointsByCtx := make(map[ckey.ContextKey][]metrics.SketchPoint)

	cs.sketchMap.flushBefore(int64(timestamp), func(ck ckey.ContextKey, p metrics.SketchPoint) {
		if p.Sketch == nil {
			return
		}
		pointsByCtx[ck] = append(pointsByCtx[ck], p)
	})

	for ck, points := range pointsByCtx {
		cs.sketches = append(cs.sketches, cs.newSketchSeries(ck, points))
	}
}

func (cs *CheckSampler) commit(timestamp float64) {
	cs.commitSeries(timestamp)
	cs.commitSketches(timestamp)
	cs.contextResolver.expireContexts(timestamp - defaultExpiry)
}

func (cs *CheckSampler) flush() (metrics.Series, metrics.SketchSeriesList) {
	// series
	series := cs.series
	cs.series = make([]*metrics.Serie, 0)

	// sketches
	sketches := cs.sketches
	cs.sketches = make([]metrics.SketchSeries, 0)

	// garbage collect unused bucket deltas
	now := time.Now()
	for ctxKey, lastSeenBucket := range cs.lastSeenBucket {
		if now.Sub(lastSeenBucket) > cs.bucketExpiry {
			delete(cs.lastSeenBucket, ctxKey)
			delete(cs.lastBucketValue, ctxKey)
		}
	}

	return series, sketches
}
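
// End-to-end usage sketch (illustrative only; it mirrors how the aggregator
// drives a sampler, with hypothetical samples and timestamps):
//
//	cs := newCheckSampler()
//	cs.addSample(sample)            // repeatedly, during the check run
//	now := float64(time.Now().Unix())
//	cs.commit(now)                  // seal the run's samples and sketches
//	series, sketches := cs.flush()  // hand the results off for serialization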