package metrics

import (
	"bytes"
	"fmt"
	"math"
	"net/url"
	"strings"
	"sync"
	"time"
)

var spaceReplacer = strings.NewReplacer(" ", "_")

// InmemSink provides a MetricSink that does in-memory aggregation
// without sending metrics over a network. It can be embedded within
// an application to provide profiling information.
type InmemSink struct {
	// interval is the length of each aggregation interval
	interval time.Duration

	// retain controls how long metric intervals are kept
	retain time.Duration

	// maxIntervals is the maximum number of intervals retained.
	// It is retain / interval.
	maxIntervals int

	// intervals is a slice of the retained intervals
	intervals    []*IntervalMetrics
	intervalLock sync.RWMutex

	// rateDenom is the interval length in seconds, used by Ingest
	// to turn interval sums into per-second rates
	rateDenom float64
}

// IntervalMetrics stores the aggregated metrics
// for a specific interval
type IntervalMetrics struct {
	sync.RWMutex

	// The start time of the interval
	Interval time.Time

	// Gauges maps the key to the last set value
	Gauges map[string]GaugeValue

	// Points maps the key to the list of values emitted
	// via EmitKey
	Points map[string][]float32

	// Counters maps the string key to a sum of the counter
	// values
	Counters map[string]SampledValue

	// Samples maps the key to an AggregateSample,
	// which has the rolled up view of a sample
	Samples map[string]SampledValue
}

// NewIntervalMetrics creates a new IntervalMetrics for a given interval
func NewIntervalMetrics(intv time.Time) *IntervalMetrics {
	return &IntervalMetrics{
		Interval: intv,
		Gauges:   make(map[string]GaugeValue),
		Points:   make(map[string][]float32),
		Counters: make(map[string]SampledValue),
		Samples:  make(map[string]SampledValue),
	}
}

// AggregateSample is used to hold aggregate metrics
// about a sample
type AggregateSample struct {
	Count       int       // The count of emitted pairs
	Rate        float64   // The values rate per time unit (usually 1 second)
	Sum         float64   // The sum of values
	SumSq       float64   `json:"-"` // The sum of squared values
	Min         float64   // Minimum value
	Max         float64   // Maximum value
	LastUpdated time.Time `json:"-"` // When value was last updated
}

// Stddev computes the sample standard deviation of the values.
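//
// It uses the computational form of the sample standard deviation:
//
//	sqrt((n*SumSq - Sum^2) / (n*(n-1)))
//
// and returns 0 when fewer than two values have been ingested.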
func (a *AggregateSample) Stddev() float64 {
	num := (float64(a.Count) * a.SumSq) - math.Pow(a.Sum, 2)
	div := float64(a.Count * (a.Count - 1))
	if div == 0 {
		return 0
	}
	return math.Sqrt(num / div)
}

// Mean computes the mean of the values.
func (a *AggregateSample) Mean() float64 {
	if a.Count == 0 {
		return 0
	}
	return a.Sum / float64(a.Count)
}

// Ingest is used to update a sample
func (a *AggregateSample) Ingest(v float64, rateDenom float64) {
	a.Count++
	a.Sum += v
	a.SumSq += (v * v)
	if v < a.Min || a.Count == 1 {
		a.Min = v
	}
	if v > a.Max || a.Count == 1 {
		a.Max = v
	}
	a.Rate = a.Sum / rateDenom
	a.LastUpdated = time.Now()
}

// String returns a human-readable summary of the sample.
func (a *AggregateSample) String() string {
	if a.Count == 0 {
		return "Count: 0"
	} else if a.Stddev() == 0 {
		return fmt.Sprintf("Count: %d Sum: %0.3f LastUpdated: %s", a.Count, a.Sum, a.LastUpdated)
	} else {
		return fmt.Sprintf("Count: %d Min: %0.3f Mean: %0.3f Max: %0.3f Stddev: %0.3f Sum: %0.3f LastUpdated: %s",
			a.Count, a.Min, a.Mean(), a.Max, a.Stddev(), a.Sum, a.LastUpdated)
	}
}

// NewInmemSinkFromURL creates an InmemSink from a URL. It is used
// (and tested) from NewMetricSinkFromURL.
func NewInmemSinkFromURL(u *url.URL) (MetricSink, error) {
	params := u.Query()

	interval, err := time.ParseDuration(params.Get("interval"))
	if err != nil {
		return nil, fmt.Errorf("Bad 'interval' param: %s", err)
	}

	retain, err := time.ParseDuration(params.Get("retain"))
	if err != nil {
		return nil, fmt.Errorf("Bad 'retain' param: %s", err)
	}

	return NewInmemSink(interval, retain), nil
}

// NewInmemSink is used to construct a new in-memory sink, using
// an aggregation interval and a maximum retention period.
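//
// A minimal usage sketch (the interval, retention, and key names here are
// illustrative, not part of the API):
//
//	sink := NewInmemSink(10*time.Second, time.Minute)
//	sink.SetGauge([]string{"queue", "depth"}, 42)
//	sink.IncrCounter([]string{"requests", "handled"}, 1)
//	intervals := sink.Data() // snapshot of the retained intervals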
func NewInmemSink(interval, retain time.Duration) *InmemSink {
	rateTimeUnit := time.Second
	i := &InmemSink{
		interval:     interval,
		retain:       retain,
		maxIntervals: int(retain / interval),
		rateDenom:    float64(interval.Nanoseconds()) / float64(rateTimeUnit.Nanoseconds()),
	}
	i.intervals = make([]*IntervalMetrics, 0, i.maxIntervals)
	return i
}

// SetGauge sets a gauge value for the key with no labels.
func (i *InmemSink) SetGauge(key []string, val float32) {
	i.SetGaugeWithLabels(key, val, nil)
}

// SetGaugeWithLabels stores the last value set for the key and labels
// in the current interval.
func (i *InmemSink) SetGaugeWithLabels(key []string, val float32, labels []Label) {
	k, name := i.flattenKeyLabels(key, labels)
	intv := i.getInterval()

	intv.Lock()
	defer intv.Unlock()
	intv.Gauges[k] = GaugeValue{Name: name, Value: val, Labels: labels}
}

// EmitKey appends the value to the list of points for the key
// in the current interval.
func (i *InmemSink) EmitKey(key []string, val float32) {
	k := i.flattenKey(key)
	intv := i.getInterval()

	intv.Lock()
	defer intv.Unlock()
	vals := intv.Points[k]
	intv.Points[k] = append(vals, val)
}

// IncrCounter increments the counter for the key with no labels.
func (i *InmemSink) IncrCounter(key []string, val float32) {
	i.IncrCounterWithLabels(key, val, nil)
}

// IncrCounterWithLabels adds the value to the counter aggregate for the
// key and labels in the current interval.
func (i *InmemSink) IncrCounterWithLabels(key []string, val float32, labels []Label) {
	k, name := i.flattenKeyLabels(key, labels)
	intv := i.getInterval()

	intv.Lock()
	defer intv.Unlock()

	agg, ok := intv.Counters[k]
	if !ok {
		agg = SampledValue{
			Name:            name,
			AggregateSample: &AggregateSample{},
			Labels:          labels,
		}
		intv.Counters[k] = agg
	}
	agg.Ingest(float64(val), i.rateDenom)
}

// AddSample records a sample value for the key with no labels.
func (i *InmemSink) AddSample(key []string, val float32) {
	i.AddSampleWithLabels(key, val, nil)
}

// AddSampleWithLabels folds the value into the sample aggregate for the
// key and labels in the current interval.
func (i *InmemSink) AddSampleWithLabels(key []string, val float32, labels []Label) {
	k, name := i.flattenKeyLabels(key, labels)
	intv := i.getInterval()

	intv.Lock()
	defer intv.Unlock()

	agg, ok := intv.Samples[k]
	if !ok {
		agg = SampledValue{
			Name:            name,
			AggregateSample: &AggregateSample{},
			Labels:          labels,
		}
		intv.Samples[k] = agg
	}
	agg.Ingest(float64(val), i.rateDenom)
}

// Data is used to retrieve all the aggregated metrics.
// The current interval may still be written to, so it is returned as a
// copy; completed intervals are returned as-is.
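//
// A minimal consumer sketch (the key "requests.handled" is illustrative):
//
//	for _, intv := range sink.Data() {
//		if c, ok := intv.Counters["requests.handled"]; ok {
//			fmt.Println(intv.Interval, c.AggregateSample.Count, c.AggregateSample.Sum)
//		}
//	}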
func (i *InmemSink) Data() []*IntervalMetrics {
	// Get the current interval, forces creation
	i.getInterval()

	i.intervalLock.RLock()
	defer i.intervalLock.RUnlock()

	n := len(i.intervals)
	intervals := make([]*IntervalMetrics, n)

	copy(intervals[:n-1], i.intervals[:n-1])
	current := i.intervals[n-1]

	// make a copy of the current interval, since it may still be updated
	intervals[n-1] = &IntervalMetrics{}
	copyCurrent := intervals[n-1]
	current.RLock()
	*copyCurrent = *current

	copyCurrent.Gauges = make(map[string]GaugeValue, len(current.Gauges))
	for k, v := range current.Gauges {
		copyCurrent.Gauges[k] = v
	}
	// the stored point slices will not change, so copying the references is enough
	copyCurrent.Points = make(map[string][]float32, len(current.Points))
	for k, v := range current.Points {
		copyCurrent.Points[k] = v
	}
	copyCurrent.Counters = make(map[string]SampledValue, len(current.Counters))
	for k, v := range current.Counters {
		copyCurrent.Counters[k] = v.deepCopy()
	}
	copyCurrent.Samples = make(map[string]SampledValue, len(current.Samples))
	for k, v := range current.Samples {
		copyCurrent.Samples[k] = v.deepCopy()
	}
	current.RUnlock()

	return intervals
}

// getExistingInterval returns the most recent interval if it matches intv,
// or nil if a new interval needs to be created.
func (i *InmemSink) getExistingInterval(intv time.Time) *IntervalMetrics {
	i.intervalLock.RLock()
	defer i.intervalLock.RUnlock()

	n := len(i.intervals)
	if n > 0 && i.intervals[n-1].Interval == intv {
		return i.intervals[n-1]
	}
	return nil
}

// createInterval creates the IntervalMetrics for intv, or returns the
// existing one if another writer created it first.
func (i *InmemSink) createInterval(intv time.Time) *IntervalMetrics {
	i.intervalLock.Lock()
	defer i.intervalLock.Unlock()

	// Check for an existing interval
	n := len(i.intervals)
	if n > 0 && i.intervals[n-1].Interval == intv {
		return i.intervals[n-1]
	}

	// Add the current interval
	current := NewIntervalMetrics(intv)
	i.intervals = append(i.intervals, current)
	n++

	// Truncate the intervals if they are too long
	if n >= i.maxIntervals {
		copy(i.intervals[0:], i.intervals[n-i.maxIntervals:])
		i.intervals = i.intervals[:i.maxIntervals]
	}
	return current
}

// getInterval returns the current interval to write to
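//
// The interval start is time.Now() truncated to i.interval, so all writes
// within the same wall-clock interval land in a single IntervalMetrics.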
func (i *InmemSink) getInterval() *IntervalMetrics {
	intv := time.Now().Truncate(i.interval)
	if m := i.getExistingInterval(intv); m != nil {
		return m
	}
	return i.createInterval(intv)
}

// flattenKey flattens the key parts into a single dot-separated string,
// replacing spaces with underscores.
func (i *InmemSink) flattenKey(parts []string) string {
	buf := &bytes.Buffer{}

	joined := strings.Join(parts, ".")

	spaceReplacer.WriteString(buf, joined)

	return buf.String()
}

// flattenKeyLabels flattens the key parts and labels into a single string,
// replacing spaces with underscores. It returns the labeled key and the
// plain (unlabeled) key.
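//
// For example (hypothetical input), parts {"api", "latency ms"} with the
// label {Name: "method", Value: "GET"} flatten to
// "api.latency_ms;method=GET" and "api.latency_ms".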
func (i *InmemSink) flattenKeyLabels(parts []string, labels []Label) (string, string) {
	key := i.flattenKey(parts)
	buf := bytes.NewBufferString(key)

	for _, label := range labels {
		spaceReplacer.WriteString(buf, fmt.Sprintf(";%s=%s", label.Name, label.Value))
	}

	return buf.String(), key
}