Merge pull request #1732 from weaveworks/metric-to-slice

Use slices instead of linked lists for Metric
This commit is contained in:
Alfonso Acosta
2016-08-02 13:08:26 +01:00
committed by GitHub
19 changed files with 255 additions and 334 deletions

View File

@@ -21,6 +21,6 @@ func respondWith(w http.ResponseWriter, code int, response interface{}) {
w.WriteHeader(code)
encoder := codec.NewEncoder(w, &codec.JsonHandle{})
if err := encoder.Encode(response); err != nil {
log.Errorf("Error encdoing response: %v", err)
log.Errorf("Error encoding response: %v", err)
}
}

View File

@@ -360,21 +360,27 @@ func (c *container) NetworkInfo(localAddrs []net.IP) report.Sets {
}
func (c *container) memoryUsageMetric(stats []docker.Stats) report.Metric {
result := report.MakeMetric()
for _, s := range stats {
result = result.Add(s.Read, float64(s.MemoryStats.Usage)).WithMax(float64(s.MemoryStats.Limit))
var max float64
samples := make([]report.Sample, len(stats))
for i, s := range stats {
samples[i].Timestamp = s.Read
samples[i].Value = float64(s.MemoryStats.Usage)
if float64(s.MemoryStats.Limit) > max {
max = float64(s.MemoryStats.Limit)
}
}
return result
return report.MakeMetric(samples).WithMax(max)
}
func (c *container) cpuPercentMetric(stats []docker.Stats) report.Metric {
result := report.MakeMetric()
if len(stats) < 2 {
return result
return report.MakeMetric(nil)
}
samples := make([]report.Sample, len(stats)-1)
var max float64
previous := stats[0]
for _, s := range stats[1:] {
for i, s := range stats[1:] {
// Copies from docker/api/client/stats.go#L205
cpuDelta := float64(s.CPUStats.CPUUsage.TotalUsage - previous.CPUStats.CPUUsage.TotalUsage)
systemDelta := float64(s.CPUStats.SystemCPUUsage - previous.CPUStats.SystemCPUUsage)
@@ -382,14 +388,15 @@ func (c *container) cpuPercentMetric(stats []docker.Stats) report.Metric {
if systemDelta > 0.0 && cpuDelta > 0.0 {
cpuPercent = (cpuDelta / systemDelta) * float64(len(s.CPUStats.CPUUsage.PercpuUsage)) * 100.0
}
result = result.Add(s.Read, cpuPercent)
samples[i].Timestamp = s.Read
samples[i].Value = cpuPercent
available := float64(len(s.CPUStats.CPUUsage.PercpuUsage)) * 100.0
if available >= result.Max {
result.Max = available
if available >= max {
max = available
}
previous = s
}
return result
return report.MakeMetric(samples).WithMax(max)
}
func (c *container) metrics() report.Metrics {

View File

@@ -92,8 +92,8 @@ func TestContainer(t *testing.T) {
docker.RestartContainer, docker.StopContainer, docker.PauseContainer,
docker.AttachContainer, docker.ExecContainer,
).WithMetrics(report.Metrics{
"docker_cpu_total_usage": report.MakeMetric(),
"docker_memory_usage": report.MakeMetric().Add(now, 12345).WithMax(45678),
"docker_cpu_total_usage": report.MakeMetric(nil),
"docker_memory_usage": report.MakeSingletonMetric(now, 12345).WithMax(45678),
}).WithParents(report.EmptySets.
Add(report.ContainerImage, report.MakeStringSet(report.MakeContainerImageNodeID("baz"))),
)

View File

@@ -125,9 +125,9 @@ func (r *Reporter) Report() (report.Report, error) {
now := mtime.Now()
metrics := GetLoad(now)
cpuUsage, max := GetCPUUsagePercent()
metrics[CPUUsage] = report.MakeMetric().Add(now, cpuUsage).WithMax(max)
metrics[CPUUsage] = report.MakeSingletonMetric(now, cpuUsage).WithMax(max)
memoryUsage, max := GetMemoryUsageBytes()
metrics[MemoryUsage] = report.MakeMetric().Add(now, memoryUsage).WithMax(max)
metrics[MemoryUsage] = report.MakeSingletonMetric(now, memoryUsage).WithMax(max)
rep.Host.AddNode(
report.MakeNodeWith(report.MakeHostNodeID(r.hostID), map[string]string{

View File

@@ -20,9 +20,9 @@ func TestReporter(t *testing.T) {
hostname = "hostname"
timestamp = time.Now()
metrics = report.Metrics{
host.Load1: report.MakeMetric().Add(timestamp, 1.0),
host.CPUUsage: report.MakeMetric().Add(timestamp, 30.0).WithMax(100.0),
host.MemoryUsage: report.MakeMetric().Add(timestamp, 60.0).WithMax(100.0),
host.Load1: report.MakeSingletonMetric(timestamp, 1.0),
host.CPUUsage: report.MakeSingletonMetric(timestamp, 30.0).WithMax(100.0),
host.MemoryUsage: report.MakeSingletonMetric(timestamp, 60.0).WithMax(100.0),
}
uptime = "278h55m43s"
kernel = "release version"
@@ -88,10 +88,10 @@ func TestReporter(t *testing.T) {
// Should have metrics
for key, want := range metrics {
wantSample := want.LastSample()
wantSample, _ := want.LastSample()
if metric, ok := node.Metrics[key]; !ok {
t.Errorf("Expected %s metric, but not found", key)
} else if sample := metric.LastSample(); sample == nil {
} else if sample, ok := metric.LastSample(); !ok {
t.Errorf("Expected %s metric to have a sample, but there were none", key)
} else if sample.Value != wantSample.Value {
t.Errorf("Expected %s metric sample %f, got %f", key, wantSample, sample.Value)

View File

@@ -45,7 +45,7 @@ var GetLoad = func(now time.Time) report.Metrics {
return nil
}
return report.Metrics{
Load1: report.MakeMetric().Add(now, one),
Load1: report.MakeSingletonMetric(now, one),
}
}

View File

@@ -45,7 +45,7 @@ var GetLoad = func(now time.Time) report.Metrics {
return nil
}
return report.Metrics{
Load1: report.MakeMetric().Add(now, one),
Load1: report.MakeSingletonMetric(now, one),
}
}

View File

@@ -69,9 +69,6 @@ func (m mockPublisher) Stop() {
}
func TestProbe(t *testing.T) {
// marshalling->unmarshaling is not idempotent due to `json:"omitempty"`
// tags, transforming empty slices into nils. So, we make DeepEqual
// happy by setting empty `json:"omitempty"` entries to nil
const probeID = "probeid"
now := time.Now()
mtime.NowForce(now)
@@ -79,8 +76,11 @@ func TestProbe(t *testing.T) {
want := report.MakeReport()
node := report.MakeNodeWith("a", map[string]string{"b": "c"})
node.Metrics = nil // omitempty
// omitempty
// marshalling->unmarshaling is not idempotent due to `json:"omitempty"`
// tags, transforming empty slices into nils. So, we make DeepEqual
// happy by setting empty `json:"omitempty"` entries to nil
node.Metrics = nil
want.Endpoint.Controls = nil
want.Process.Controls = nil
want.Container.Controls = nil

View File

@@ -99,11 +99,11 @@ func (r *Reporter) processTopology() (report.Topology, error) {
if deltaTotal > 0 {
cpuUsage := float64(p.Jiffies-prev.Jiffies) / float64(deltaTotal) * 100.
node = node.WithMetric(CPUUsage, report.MakeMetric().Add(now, cpuUsage).WithMax(maxCPU))
node = node.WithMetric(CPUUsage, report.MakeSingletonMetric(now, cpuUsage).WithMax(maxCPU))
}
node = node.WithMetric(MemoryUsage, report.MakeMetric().Add(now, float64(p.RSSBytes)).WithMax(float64(p.RSSBytesLimit)))
node = node.WithMetric(OpenFilesCount, report.MakeMetric().Add(now, float64(p.OpenFilesCount)).WithMax(float64(p.OpenFilesLimit)))
node = node.WithMetric(MemoryUsage, report.MakeSingletonMetric(now, float64(p.RSSBytes)).WithMax(float64(p.RSSBytesLimit)))
node = node.WithMetric(OpenFilesCount, report.MakeSingletonMetric(now, float64(p.OpenFilesCount)).WithMax(float64(p.OpenFilesLimit)))
t.AddNode(node)
})

View File

@@ -62,7 +62,7 @@ func TestReporter(t *testing.T) {
}
if memoryUsage, ok := node.Metrics[process.MemoryUsage]; !ok {
t.Errorf("Expected memory usage metric, but not found")
} else if sample := memoryUsage.LastSample(); sample == nil {
} else if sample, ok := memoryUsage.LastSample(); !ok {
t.Errorf("Expected memory usage metric to have a sample, but there were none")
} else if sample.Value != 0. {
t.Errorf("Expected memory usage metric sample %f, got %f", 0., sample.Value)

View File

@@ -117,7 +117,7 @@ func TestNodeMetrics(t *testing.T) {
func TestMetricRowSummary(t *testing.T) {
var (
now = time.Now()
metric = report.MakeMetric().Add(now, 1.234)
metric = report.MakeSingletonMetric(now, 1.234)
row = report.MetricRow{
ID: "id",
Format: "format",

View File

@@ -47,10 +47,10 @@ func TestSummaries(t *testing.T) {
// It should summarize nodes' metrics
{
t1, t2 := mtime.Now().Add(-1*time.Minute), mtime.Now()
metric := report.MakeMetric().Add(t1, 1).Add(t2, 2)
metric := report.MakeMetric([]report.Sample{{t1, 1}, {t2, 2}})
input := fixture.Report.Copy()
input.Process.Nodes[fixture.ClientProcess1NodeID] = input.Process.Nodes[fixture.ClientProcess1NodeID].WithMetrics(report.Metrics{process.CPUUsage: metric})
input.Process.Nodes[fixture.ClientProcess1NodeID].Metrics[process.CPUUsage] = metric
have := detailed.Summaries(input, render.ProcessRenderer.Render(input, nil))
node, ok := have[fixture.ClientProcess1NodeID]

View File

@@ -31,20 +31,20 @@ func TestPropagateSingleMetrics(t *testing.T) {
report.MakeNode("child1").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}),
),
),
topology: report.Container,
output: report.Nodes{
"a": report.MakeNode("a").WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}).WithChildren(
report.MakeNodeSet(
report.MakeNode("child1").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}),
),
),
@@ -57,30 +57,30 @@ func TestPropagateSingleMetrics(t *testing.T) {
report.MakeNode("child1").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}),
report.MakeNode("child2").
WithTopology("otherTopology").
WithMetrics(report.Metrics{
"metric2": report.MakeMetric(),
"metric2": report.MakeMetric(nil),
}),
),
),
topology: report.Container,
output: report.Nodes{
"a": report.MakeNode("a").WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}).WithChildren(
report.MakeNodeSet(
report.MakeNode("child1").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}),
report.MakeNode("child2").
WithTopology("otherTopology").
WithMetrics(report.Metrics{
"metric2": report.MakeMetric(),
"metric2": report.MakeMetric(nil),
}),
),
),
@@ -93,12 +93,12 @@ func TestPropagateSingleMetrics(t *testing.T) {
report.MakeNode("child1").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}),
report.MakeNode("child2").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric2": report.MakeMetric(),
"metric2": report.MakeMetric(nil),
}),
),
),
@@ -109,12 +109,12 @@ func TestPropagateSingleMetrics(t *testing.T) {
report.MakeNode("child1").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}),
report.MakeNode("child2").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric2": report.MakeMetric(),
"metric2": report.MakeMetric(nil),
}),
),
),
@@ -127,32 +127,32 @@ func TestPropagateSingleMetrics(t *testing.T) {
report.MakeNode("child1").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}),
report.MakeNode("child2").
WithLatest(report.DoesNotMakeConnections, now, "").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric2": report.MakeMetric(),
"metric2": report.MakeMetric(nil),
}),
),
),
topology: report.Container,
output: report.Nodes{
"a": report.MakeNode("a").WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}).WithChildren(
report.MakeNodeSet(
report.MakeNode("child1").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric1": report.MakeMetric(),
"metric1": report.MakeMetric(nil),
}),
report.MakeNode("child2").
WithLatest(report.DoesNotMakeConnections, now, "").
WithTopology(report.Container).
WithMetrics(report.Metrics{
"metric2": report.MakeMetric(),
"metric2": report.MakeMetric(nil),
}),
),
),

View File

@@ -4,7 +4,6 @@ import (
"time"
"github.com/ugorji/go/codec"
"github.com/weaveworks/scope/common/mtime"
)
@@ -50,7 +49,7 @@ func (cs Controls) AddControls(controls []Control) {
}
// NodeControls represent the individual controls that are valid for a given
// node at a given point in time. Its is immutable. A zero-value for Timestamp
// node at a given point in time. It's immutable. A zero-value for Timestamp
// indicates this NodeControls is 'not set'.
type NodeControls struct {
Timestamp time.Time
@@ -86,15 +85,17 @@ func (nc NodeControls) Add(ids ...string) NodeControls {
}
}
// WireNodeControls is the intermediate type for json encoding.
type WireNodeControls struct {
// WireNodeControls is the intermediate type for encoding/decoding.
// Only needed for backwards compatibility with probes
// (time.Time is encoded in binary in MsgPack)
type wireNodeControls struct {
Timestamp string `json:"timestamp,omitempty"`
Controls StringSet `json:"controls,omitempty"`
}
// CodecEncodeSelf implements codec.Selfer
func (nc *NodeControls) CodecEncodeSelf(encoder *codec.Encoder) {
encoder.Encode(WireNodeControls{
encoder.Encode(wireNodeControls{
Timestamp: renderTime(nc.Timestamp),
Controls: nc.Controls,
})
@@ -102,7 +103,7 @@ func (nc *NodeControls) CodecEncodeSelf(encoder *codec.Encoder) {
// CodecDecodeSelf implements codec.Selfer
func (nc *NodeControls) CodecDecodeSelf(decoder *codec.Decoder) {
in := WireNodeControls{}
in := wireNodeControls{}
if err := decoder.Decode(&in); err != nil {
return
}

View File

@@ -54,6 +54,8 @@ func (*MetricRow) UnmarshalJSON(b []byte) error {
panic("UnmarshalJSON shouldn't be used, use CodecDecodeSelf instead")
}
// Needed to flatten the fields for backwards compatibility with probes
// (time.Time is encoded in binary in MsgPack)
type wiredMetricRow struct {
ID string `json:"id"`
Label string `json:"label"`

View File

@@ -28,7 +28,7 @@ func (t MetricTemplate) MetricRows(n Node) []MetricRow {
Priority: t.Priority,
Metric: &metric,
}
if s := metric.LastSample(); s != nil {
if s, ok := metric.LastSample(); ok {
row.Value = toFixed(s.Value, 2)
}
return []MetricRow{row}

View File

@@ -1,13 +1,10 @@
package report
import (
"bytes"
"encoding/gob"
"math"
"time"
"github.com/ugorji/go/codec"
"github.com/weaveworks/ps"
)
// Metrics is a string->metric map.
@@ -24,14 +21,18 @@ func (m Metrics) Lookup(key string) (Metric, bool) {
func (m Metrics) Merge(other Metrics) Metrics {
result := m.Copy()
for k, v := range other {
result[k] = result[k].Merge(v)
if rv, ok := result[k]; ok {
result[k] = rv.Merge(v)
} else {
result[k] = v
}
}
return result
}
// Copy returns a value copy of the metrics map.
func (m Metrics) Copy() Metrics {
result := Metrics{}
result := make(Metrics, len(m))
for k, v := range m {
result[k] = v
}
@@ -41,7 +42,7 @@ func (m Metrics) Copy() Metrics {
// Metric is a list of timeseries data with some metadata. Construct it with
// MakeMetric or MakeSingletonMetric. Metrics are immutable.
type Metric struct {
Samples ps.List
Samples []Sample
Min, Max float64
First, Last time.Time
}
@@ -52,28 +53,55 @@ type Sample struct {
Value float64 `json:"value"`
}
var nilMetric = Metric{Samples: ps.NewList()}
// MakeMetric makes a new Metric.
func MakeMetric() Metric {
return nilMetric
}
// Copy returns a value copy of the Metric. Metric is immutable, so we can skip
// this.
func (m Metric) Copy() Metric {
return m
}
// WithFirst returns a fresh copy of m, with First set to t.
func (m Metric) WithFirst(t time.Time) Metric {
// MakeSingletonMetric makes a metric with a single value
func MakeSingletonMetric(t time.Time, v float64) Metric {
return Metric{
Samples: m.Samples,
Max: m.Max,
Min: m.Min,
Samples: []Sample{{t, v}},
Min: v,
Max: v,
First: t,
Last: m.Last,
Last: t,
}
}
// MakeMetric makes a new Metric from unique samples incrementally ordered in
// time.
func MakeMetric(samples []Sample) Metric {
if len(samples) < 1 {
return Metric{}
}
var (
min = samples[0].Value
max = samples[0].Value
)
for i := 1; i < len(samples); i++ {
if samples[i].Value < min {
min = samples[i].Value
} else if samples[i].Value > max {
max = samples[i].Value
}
}
return Metric{
Samples: samples,
Min: min,
Max: max,
First: samples[0].Timestamp,
Last: samples[len(samples)-1].Timestamp,
}
}
// Copy returns a copy of the Metric.
func (m Metric) Copy() Metric {
c := m
if c.Samples != nil {
c.Samples = make([]Sample, len(m.Samples))
copy(c.Samples, m.Samples)
}
return c
}
// WithMax returns a fresh copy of m, with Max set to max
@@ -89,10 +117,7 @@ func (m Metric) WithMax(max float64) Metric {
// Len returns the number of samples in the metric.
func (m Metric) Len() int {
if m.Samples == nil {
return 0
}
return m.Samples.Size()
return len(m.Samples)
}
func first(t1, t2 time.Time) time.Time {
@@ -109,83 +134,66 @@ func last(t1, t2 time.Time) time.Time {
return t2
}
// concat returns a new list formed by adding each element of acc to curr
// acc or curr can be nil.
func concat(acc []interface{}, curr ps.List) ps.List {
if curr == nil {
curr = ps.NewList()
}
for i := len(acc) - 1; i >= 0; i-- {
curr = curr.Cons(acc[i])
}
return curr
}
// Add returns a new Metric with (t, v) added to its Samples. Add is the only
// valid way to grow a Metric.
func (m Metric) Add(t time.Time, v float64) Metric {
// Find the first element which is before your element, and insert
// your new element in the list. NB we want to dedupe entries with
// equal timestamps.
// This should be O(1) to insert a latest element, and O(n) in general.
curr, acc := m.Samples, make([]interface{}, 0, m.Len()+1)
for {
if curr == nil || curr.IsNil() {
break
}
currSample := curr.Head().(Sample)
if currSample.Timestamp.Equal(t) {
curr = curr.Tail()
break
}
if currSample.Timestamp.Before(t) {
break
}
acc, curr = append(acc, curr.Head()), curr.Tail()
}
acc = append(acc, Sample{t, v})
curr = concat(acc, curr)
return Metric{
Samples: curr,
Max: math.Max(m.Max, v),
Min: math.Min(m.Min, v),
First: first(m.First, t),
Last: last(m.Last, t),
}
}
// Merge combines the two Metrics and returns a new result.
func (m Metric) Merge(other Metric) Metric {
// Merge two lists of samples in O(n)
curr1, curr2, acc := m.Samples, other.Samples, make([]interface{}, 0, m.Len()+other.Len())
var newSamples ps.List
// Optimize the empty and non-overlapping case since they are very common
switch {
case len(m.Samples) == 0:
return other
case len(other.Samples) == 0:
return m
case other.First.After(m.Last):
samplesOut := make([]Sample, len(m.Samples)+len(other.Samples))
copy(samplesOut, m.Samples)
copy(samplesOut[len(m.Samples):], other.Samples)
return Metric{
Samples: samplesOut,
Max: math.Max(m.Max, other.Max),
Min: math.Min(m.Min, other.Min),
First: m.First,
Last: other.Last,
}
case m.First.After(other.Last):
samplesOut := make([]Sample, len(m.Samples)+len(other.Samples))
copy(samplesOut, other.Samples)
copy(samplesOut[len(other.Samples):], m.Samples)
return Metric{
Samples: samplesOut,
Max: math.Max(m.Max, other.Max),
Min: math.Min(m.Min, other.Min),
First: other.First,
Last: m.Last,
}
}
// Merge two lists of Samples in O(n)
samplesOut := make([]Sample, 0, len(m.Samples)+len(other.Samples))
mI, otherI := 0, 0
for {
if curr1 == nil || curr1.IsNil() {
newSamples = concat(acc, curr2)
if otherI >= len(other.Samples) {
samplesOut = append(samplesOut, m.Samples[mI:]...)
break
} else if curr2 == nil || curr2.IsNil() {
newSamples = concat(acc, curr1)
} else if mI >= len(m.Samples) {
samplesOut = append(samplesOut, other.Samples[otherI:]...)
break
}
s1 := curr1.Head().(Sample)
s2 := curr2.Head().(Sample)
if s1.Timestamp.Equal(s2.Timestamp) {
curr1, curr2, acc = curr1.Tail(), curr2.Tail(), append(acc, s1)
} else if s1.Timestamp.After(s2.Timestamp) {
curr1, acc = curr1.Tail(), append(acc, s1)
if m.Samples[mI].Timestamp.Equal(other.Samples[otherI].Timestamp) {
samplesOut = append(samplesOut, m.Samples[mI])
mI++
otherI++
} else if m.Samples[mI].Timestamp.Before(other.Samples[otherI].Timestamp) {
samplesOut = append(samplesOut, m.Samples[mI])
mI++
} else {
curr2, acc = curr2.Tail(), append(acc, s2)
samplesOut = append(samplesOut, other.Samples[otherI])
otherI++
}
}
return Metric{
Samples: newSamples,
Samples: samplesOut,
Max: math.Max(m.Max, other.Max),
Min: math.Min(m.Min, other.Min),
First: first(m.First, other.First),
@@ -195,14 +203,14 @@ func (m Metric) Merge(other Metric) Metric {
// Div returns a new copy of the metric, with each value divided by n.
func (m Metric) Div(n float64) Metric {
curr, acc := m.Samples, ps.NewList()
for curr != nil && !curr.IsNil() {
s := curr.Head().(Sample)
curr, acc = curr.Tail(), acc.Cons(Sample{s.Timestamp, s.Value / n})
samplesOut := make([]Sample, len(m.Samples), len(m.Samples))
for i := range m.Samples {
samplesOut[i].Value = m.Samples[i].Value / n
samplesOut[i].Timestamp = m.Samples[i].Timestamp
}
acc = acc.Reverse()
return Metric{
Samples: acc,
Samples: samplesOut,
Max: m.Max / n,
Min: m.Min / n,
First: m.First,
@@ -210,20 +218,20 @@ func (m Metric) Div(n float64) Metric {
}
}
// LastSample returns the last sample in the metric, or nil if there are no
// samples.
func (m Metric) LastSample() *Sample {
if m.Samples == nil || m.Samples.IsNil() {
return nil
// LastSample obtains the last sample of the metric
func (m Metric) LastSample() (Sample, bool) {
if m.Samples == nil {
return Sample{}, false
}
s := m.Samples.Head().(Sample)
return &s
return m.Samples[len(m.Samples)-1], true
}
// WireMetrics is the on-the-wire representation of Metrics.
// Only needed for backwards compatibility with probes
// (time.Time is encoded in binary in MsgPack)
type WireMetrics struct {
Samples []Sample `json:"samples,omitempty"` // On the wire, samples are sorted oldest to newest,
Min float64 `json:"min"` // the opposite order to how we store them internally.
Samples []Sample `json:"samples,omitempty"`
Min float64 `json:"min"`
Max float64 `json:"max"`
First string `json:"first,omitempty"`
Last string `json:"last,omitempty"`
@@ -244,14 +252,8 @@ func parseTime(s string) time.Time {
// ToIntermediate converts the metric to a representation suitable
// for serialization.
func (m Metric) ToIntermediate() WireMetrics {
samples := []Sample{}
if m.Samples != nil {
m.Samples.Reverse().ForEach(func(s interface{}) {
samples = append(samples, s.(Sample))
})
}
return WireMetrics{
Samples: samples,
Samples: m.Samples,
Max: m.Max,
Min: m.Min,
First: renderTime(m.First),
@@ -262,12 +264,8 @@ func (m Metric) ToIntermediate() WireMetrics {
// FromIntermediate obtains the metric from a representation suitable
// for serialization.
func (m WireMetrics) FromIntermediate() Metric {
samples := ps.NewList()
for _, s := range m.Samples {
samples = samples.Cons(s)
}
return Metric{
Samples: samples,
Samples: m.Samples,
Max: m.Max,
Min: m.Min,
First: parseTime(m.First),
@@ -299,20 +297,3 @@ func (Metric) MarshalJSON() ([]byte, error) {
func (*Metric) UnmarshalJSON(b []byte) error {
panic("UnmarshalJSON shouldn't be used, use CodecDecodeSelf instead")
}
// GobEncode implements gob.Marshaller
func (m Metric) GobEncode() ([]byte, error) {
buf := bytes.Buffer{}
err := gob.NewEncoder(&buf).Encode(m.ToIntermediate())
return buf.Bytes(), err
}
// GobDecode implements gob.Unmarshaller
func (m *Metric) GobDecode(input []byte) error {
in := WireMetrics{}
if err := gob.NewDecoder(bytes.NewBuffer(input)).Decode(&in); err != nil {
return err
}
*m = in.FromIntermediate()
return nil
}

View File

@@ -19,17 +19,17 @@ func TestMetricsMerge(t *testing.T) {
t4 := time.Now().Add(3 * time.Minute)
metrics1 := report.Metrics{
"metric1": report.MakeMetric().Add(t1, 0.1).Add(t2, 0.2),
"metric2": report.MakeMetric().Add(t3, 0.3),
"metric1": report.MakeMetric([]report.Sample{{t1, 0.1}, {t2, 0.2}}),
"metric2": report.MakeSingletonMetric(t3, 0.3),
}
metrics2 := report.Metrics{
"metric2": report.MakeMetric().Add(t4, 0.4),
"metric3": report.MakeMetric().Add(t1, 0.1).Add(t2, 0.2),
"metric2": report.MakeSingletonMetric(t4, 0.4),
"metric3": report.MakeMetric([]report.Sample{{t1, 0.1}, {t2, 0.2}}),
}
want := report.Metrics{
"metric1": report.MakeMetric().Add(t1, 0.1).Add(t2, 0.2),
"metric2": report.MakeMetric().Add(t3, 0.3).Add(t4, 0.4),
"metric3": report.MakeMetric().Add(t1, 0.1).Add(t2, 0.2),
"metric1": report.MakeMetric([]report.Sample{{t1, 0.1}, {t2, 0.2}}),
"metric2": report.MakeMetric([]report.Sample{{t3, 0.3}, {t4, 0.4}}),
"metric3": report.MakeMetric([]report.Sample{{t1, 0.1}, {t2, 0.2}}),
}
have := metrics1.Merge(metrics2)
if !reflect.DeepEqual(want, have) {
@@ -40,7 +40,7 @@ func TestMetricsMerge(t *testing.T) {
func TestMetricsCopy(t *testing.T) {
t1 := time.Now()
want := report.Metrics{
"metric1": report.MakeMetric().Add(t1, 0.1),
"metric1": report.MakeSingletonMetric(t1, 0.1),
}
delete(want.Copy(), "metric1") // Modify a copy
have := want.Copy() // Check the original wasn't affected
@@ -65,66 +65,24 @@ func checkMetric(t *testing.T, metric report.Metric, first, last time.Time, min,
}
func TestMetricFirstLastMinMax(t *testing.T) {
metric := report.MakeMetric()
var zero time.Time
checkMetric(t, report.MakeMetric(nil), time.Time{}, time.Time{}, 0.0, 0.0)
t1 := time.Now()
t2 := time.Now().Add(1 * time.Minute)
metric1 := report.MakeMetric([]report.Sample{{t1, -0.1}, {t2, 0.2}})
checkMetric(t, metric1, t1, t2, -0.1, 0.2)
checkMetric(t, metric1.Merge(metric1), t1, t2, -0.1, 0.2)
t3 := time.Now().Add(2 * time.Minute)
t4 := time.Now().Add(3 * time.Minute)
other := report.MakeMetric()
other.Max = 5
other.Min = -5
other.First = t1.Add(-1 * time.Minute)
other.Last = t4.Add(1 * time.Minute)
metric2 := report.MakeMetric([]report.Sample{{t3, 0.31}, {t4, 0.4}})
tests := []struct {
f func(report.Metric) report.Metric
first, last time.Time
min, max float64
}{
{nil, zero, zero, 0, 0},
{func(m report.Metric) report.Metric { return m.Add(t2, 2) }, t2, t2, 0, 2},
{func(m report.Metric) report.Metric { return m.Add(t1, 1) }, t1, t2, 0, 2},
{func(m report.Metric) report.Metric { return m.Add(t3, -1) }, t1, t3, -1, 2},
{func(m report.Metric) report.Metric { return m.Add(t4, 3) }, t1, t4, -1, 3},
{func(m report.Metric) report.Metric { return m.Merge(other) }, t1.Add(-1 * time.Minute), t4.Add(1 * time.Minute), -5, 5},
}
for _, test := range tests {
oldFirst, oldLast, oldMin, oldMax := metric.First, metric.Last, metric.Min, metric.Max
oldMetric := metric
if test.f != nil {
metric = test.f(metric)
}
// Check it didn't modify the old one
checkMetric(t, oldMetric, oldFirst, oldLast, oldMin, oldMax)
// Check the new one is as expected
checkMetric(t, metric, test.first, test.last, test.min, test.max)
}
}
func TestMetricAdd(t *testing.T) {
s := []report.Sample{
{time.Now(), 0.1},
{time.Now().Add(1 * time.Minute), 0.2},
{time.Now().Add(2 * time.Minute), 0.3},
}
have := report.MakeMetric().
Add(s[0].Timestamp, s[0].Value).
Add(s[2].Timestamp, s[2].Value). // Keeps sorted
Add(s[1].Timestamp, s[1].Value).
Add(s[2].Timestamp, 0.5) // Overwrites duplicate timestamps
want := report.MakeMetric().
Add(s[0].Timestamp, s[0].Value).
Add(s[1].Timestamp, s[1].Value).
Add(s[2].Timestamp, 0.5)
if !reflect.DeepEqual(want, have) {
t.Errorf("diff: %s", test.Diff(want, have))
}
checkMetric(t, metric2, t3, t4, 0.31, 0.4)
checkMetric(t, metric1.Merge(metric2), t1, t4, -0.1, 0.4)
checkMetric(t, metric2.Merge(metric1), t1, t4, -0.1, 0.4)
}
func TestMetricMerge(t *testing.T) {
@@ -133,20 +91,12 @@ func TestMetricMerge(t *testing.T) {
t3 := time.Now().Add(2 * time.Minute)
t4 := time.Now().Add(3 * time.Minute)
metric1 := report.MakeMetric().
Add(t2, 0.2).
Add(t3, 0.31)
metric1 := report.MakeMetric([]report.Sample{{t2, 0.2}, {t3, 0.31}})
metric2 := report.MakeMetric().
Add(t1, -0.1).
Add(t3, 0.3).
Add(t4, 0.4)
metric2 := report.MakeMetric([]report.Sample{{t1, -0.1}, {t3, 0.3}, {t4, 0.4}})
want := report.MakeMetric([]report.Sample{{t1, -0.1}, {t2, 0.2}, {t3, 0.31}, {t4, 0.4}})
want := report.MakeMetric().
Add(t1, -0.1).
Add(t2, 0.2).
Add(t3, 0.31).
Add(t4, 0.4)
have := metric1.Merge(metric2)
if !reflect.DeepEqual(want, have) {
t.Errorf("diff: %s", test.Diff(want, have))
@@ -159,8 +109,8 @@ func TestMetricMerge(t *testing.T) {
if !metric1.Last.Equal(t3) {
t.Errorf("Expected metric1.Last == %q, but was: %q", t3, metric1.Last)
}
if metric1.Min != 0.0 {
t.Errorf("Expected metric1.Min == %f, but was: %f", 0.0, metric1.Min)
if metric1.Min != 0.2 {
t.Errorf("Expected metric1.Min == %f, but was: %f", 0.2, metric1.Min)
}
if metric1.Max != 0.31 {
t.Errorf("Expected metric1.Max == %f, but was: %f", 0.31, metric1.Max)
@@ -173,13 +123,13 @@ func TestMetricMerge(t *testing.T) {
}
func TestMetricCopy(t *testing.T) {
want := report.MakeMetric()
want := report.MakeMetric(nil)
have := want.Copy()
if !reflect.DeepEqual(want, have) {
t.Errorf("diff: %s", test.Diff(want, have))
}
want = report.MakeMetric().Add(time.Now(), 1)
want = report.MakeSingletonMetric(time.Now(), 1)
have = want.Copy()
if !reflect.DeepEqual(want, have) {
t.Errorf("diff: %s", test.Diff(want, have))
@@ -190,12 +140,8 @@ func TestMetricDiv(t *testing.T) {
t1 := time.Now()
t2 := time.Now().Add(1 * time.Minute)
want := report.MakeMetric().
Add(t1, -2).
Add(t2, 2)
beforeDiv := report.MakeMetric().
Add(t1, -2048).
Add(t2, 2048)
want := report.MakeMetric([]report.Sample{{t1, -2}, {t2, 2}})
beforeDiv := report.MakeMetric([]report.Sample{{t1, -2048}, {t2, 2048}})
have := beforeDiv.Div(1024)
if !reflect.DeepEqual(want, have) {
t.Errorf("diff: %s", test.Diff(want, have))
@@ -218,55 +164,39 @@ func TestMetricMarshalling(t *testing.T) {
{Timestamp: t4, Value: 0.4},
}
want := report.MakeMetric()
for _, sample := range wantSamples {
want = want.Add(sample.Timestamp, sample.Value)
}
want := report.MakeMetric(wantSamples)
// gob
{
gobs, err := want.GobEncode()
if err != nil {
for _, h := range []codec.Handle{
codec.Handle(&codec.MsgpackHandle{}),
codec.Handle(&codec.JsonHandle{}),
} {
buf := &bytes.Buffer{}
encoder := codec.NewEncoder(buf, h)
if err := encoder.Encode(want); err != nil {
t.Fatal(err)
}
bufCopy := bytes.NewBuffer(buf.Bytes())
decoder := codec.NewDecoder(buf, h)
var have report.Metric
have.GobDecode(gobs)
if err := decoder.Decode(&have); err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(want, have) {
t.Error(test.Diff(want, have))
}
}
// others
{
for _, h := range []codec.Handle{
codec.Handle(&codec.MsgpackHandle{}),
codec.Handle(&codec.JsonHandle{}),
} {
buf := &bytes.Buffer{}
encoder := codec.NewEncoder(buf, h)
want.CodecEncodeSelf(encoder)
bufCopy := bytes.NewBuffer(buf.Bytes())
decoder := codec.NewDecoder(buf, h)
var have report.Metric
have.CodecDecodeSelf(decoder)
if !reflect.DeepEqual(want, have) {
t.Error(test.Diff(want, have))
}
// extra check for samples
decoder = codec.NewDecoder(bufCopy, h)
var wire struct {
Samples []report.Sample `json:"samples"`
}
if err := decoder.Decode(&wire); err != nil {
t.Error(err)
}
if !reflect.DeepEqual(wantSamples, wire.Samples) {
t.Error(test.Diff(wantSamples, wire.Samples))
}
// extra check for samples
decoder = codec.NewDecoder(bufCopy, h)
var wire struct {
Samples []report.Sample `json:"samples"`
}
if err := decoder.Decode(&wire); err != nil {
t.Error(err)
}
if !reflect.DeepEqual(wantSamples, wire.Samples) {
t.Error(test.Diff(wantSamples, wire.Samples))
}
}

View File

@@ -101,22 +101,22 @@ var (
ServiceUID = "service1234"
ServiceNodeID = report.MakeServiceNodeID(ServiceUID)
ClientProcess1CPUMetric = report.MakeMetric().Add(Now, 0.01).WithFirst(Now.Add(-1 * time.Second))
ClientProcess1MemoryMetric = report.MakeMetric().Add(Now, 0.02).WithFirst(Now.Add(-2 * time.Second))
ClientProcess1CPUMetric = report.MakeSingletonMetric(Now.Add(-1*time.Second), 0.01)
ClientProcess1MemoryMetric = report.MakeSingletonMetric(Now.Add(-2*time.Second), 0.02)
ClientContainerCPUMetric = report.MakeMetric().Add(Now, 0.03).WithFirst(Now.Add(-3 * time.Second))
ClientContainerMemoryMetric = report.MakeMetric().Add(Now, 0.04).WithFirst(Now.Add(-4 * time.Second))
ClientContainerCPUMetric = report.MakeSingletonMetric(Now.Add(-3*time.Second), 0.03)
ClientContainerMemoryMetric = report.MakeSingletonMetric(Now.Add(-4*time.Second), 0.04)
ServerContainerCPUMetric = report.MakeMetric().Add(Now, 0.05).WithFirst(Now.Add(-5 * time.Second))
ServerContainerMemoryMetric = report.MakeMetric().Add(Now, 0.06).WithFirst(Now.Add(-6 * time.Second))
ServerContainerCPUMetric = report.MakeSingletonMetric(Now.Add(-5*time.Second), 0.05)
ServerContainerMemoryMetric = report.MakeSingletonMetric(Now.Add(-6*time.Second), 0.06)
ClientHostCPUMetric = report.MakeMetric().Add(Now, 0.07).WithFirst(Now.Add(-7 * time.Second))
ClientHostMemoryMetric = report.MakeMetric().Add(Now, 0.08).WithFirst(Now.Add(-8 * time.Second))
ClientHostLoad1Metric = report.MakeMetric().Add(Now, 0.09).WithFirst(Now.Add(-9 * time.Second))
ClientHostCPUMetric = report.MakeSingletonMetric(Now.Add(-7*time.Second), 0.07)
ClientHostMemoryMetric = report.MakeSingletonMetric(Now.Add(-8*time.Second), 0.08)
ClientHostLoad1Metric = report.MakeSingletonMetric(Now.Add(-9*time.Second), 0.09)
ServerHostCPUMetric = report.MakeMetric().Add(Now, 0.12).WithFirst(Now.Add(-12 * time.Second))
ServerHostMemoryMetric = report.MakeMetric().Add(Now, 0.13).WithFirst(Now.Add(-13 * time.Second))
ServerHostLoad1Metric = report.MakeMetric().Add(Now, 0.14).WithFirst(Now.Add(-14 * time.Second))
ServerHostCPUMetric = report.MakeSingletonMetric(Now.Add(-12*time.Second), 0.12)
ServerHostMemoryMetric = report.MakeSingletonMetric(Now.Add(-13*time.Second), 0.13)
ServerHostLoad1Metric = report.MakeSingletonMetric(Now.Add(-14*time.Second), 0.14)
Report = report.Report{
ID: "test-report",