Add an .inc_by() method to CounterMetric; implement DistributionMetric in a neater way

Paul "LeoNerd" Evans 2015-03-10 15:54:16 +00:00
parent 63cb7ece62
commit c1cdd7954d
2 changed files with 16 additions and 26 deletions
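For orientation, here is a minimal standalone sketch of the pattern this commit introduces: inc() becomes a thin wrapper around a new inc_by(incr, *values), so callers can bump a counter by an arbitrary amount in a single call. The class below is an illustrative stand-in with labels elided, not the actual synapse.metrics code.

class CounterMetric:
    """Simplified stand-in for CounterMetric: a single unlabelled count."""

    def __init__(self, name):
        self.name = name
        self.count = 0

    def inc_by(self, incr):
        # New primitive: increment by an arbitrary amount.
        self.count += incr

    def inc(self):
        # inc() is now just inc_by(1), as in this commit.
        self.inc_by(1)

    def render(self):
        return ["%s %d" % (self.name, self.count)]


counter = CounterMetric("scalar")
counter.inc()
counter.inc_by(2)
print(counter.render())  # ['scalar 3'], matching the updated test below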

@@ -64,7 +64,7 @@ class CounterMetric(BaseMetric):
         if self.is_scalar():
             self.counts[()] = 0
 
-    def inc(self, *values):
+    def inc_by(self, incr, *values):
         if len(values) != self.dimension():
             raise ValueError("Expected as many values to inc() as labels (%d)" %
                 (self.dimension())
@@ -73,9 +73,12 @@ class CounterMetric(BaseMetric):
         # TODO: should assert that the tag values are all strings
 
         if values not in self.counts:
-            self.counts[values] = 1
+            self.counts[values] = incr
         else:
-            self.counts[values] += 1
+            self.counts[values] += incr
+
+    def inc(self, *values):
+        self.inc_by(1, *values)
 
     def render_item(self, k):
         return ["%s%s %d" % (self.name, self._render_key(k), self.counts[k])]
@@ -101,7 +104,7 @@ class CallbackMetric(BaseMetric):
                 for k in sorted(value.keys())]
 
 
-class DistributionMetric(CounterMetric):
+class DistributionMetric(object):
     """A combination of an event counter and an accumulator, which counts
     both the number of events and accumulates the total value. Typically this
     could be used to keep track of method-running times, or other distributions
@@ -110,28 +113,16 @@ class DistributionMetric(CounterMetric):
     TODO(paul): Try to export some heatmap-style stats?
     """
 
-    def __init__(self, *args, **kwargs):
-        super(DistributionMetric, self).__init__(*args, **kwargs)
-
-        self.totals = {}
-
-        # Scalar metrics are never empty
-        if self.is_scalar():
-            self.totals[()] = 0
+    def __init__(self, name, *args, **kwargs):
+        self.counts = CounterMetric(name + ":count", **kwargs)
+        self.totals = CounterMetric(name + ":total", **kwargs)
 
     def inc_by(self, inc, *values):
-        self.inc(*values)
-
-        if values not in self.totals:
-            self.totals[values] = inc
-        else:
-            self.totals[values] += inc
-
-    def render_item(self, k):
-        keystr = self._render_key(k)
+        self.counts.inc(*values)
+        self.totals.inc_by(inc, *values)
 
-        return ["%s:count%s %d" % (self.name, keystr, self.counts[k]),
-                "%s:total%s %d" % (self.name, keystr, self.totals[k])]
+    def render(self):
+        return self.counts.render() + self.totals.render()
 
 
 class CacheMetric(object):
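The DistributionMetric rework above replaces inheritance with composition: instead of subclassing CounterMetric and keeping a parallel totals dict, the class now owns two CounterMetric instances (name + ":count" and name + ":total") and renders them back to back. Below is a minimal standalone sketch of that shape; the dict-based label handling is a simplification standing in for the real label machinery passed through **kwargs.

class CounterMetric:
    """Simplified labelled counter: one integer per tuple of label values."""

    def __init__(self, name):
        self.name = name
        self.counts = {}

    def inc_by(self, incr, *values):
        self.counts[values] = self.counts.get(values, 0) + incr

    def inc(self, *values):
        self.inc_by(1, *values)

    def render(self):
        return ["%s{%s} %d" % (self.name, ",".join(k), self.counts[k])
                for k in sorted(self.counts)]


class DistributionMetric:
    """Counts events and accumulates their total value, by composition."""

    def __init__(self, name):
        self.counts = CounterMetric(name + ":count")
        self.totals = CounterMetric(name + ":total")

    def inc_by(self, inc, *values):
        self.counts.inc(*values)
        self.totals.inc_by(inc, *values)

    def render(self):
        # Concatenation groups all ":count" lines before all ":total" lines.
        return self.counts.render() + self.totals.render()


metric = DistributionMetric("queries")
metric.inc_by(800, "INSERT")
metric.inc_by(200, "SELECT")
metric.inc_by(300, "SELECT")
print(metric.render())
# ['queries:count{INSERT} 1', 'queries:count{SELECT} 2',
#  'queries:total{INSERT} 800', 'queries:total{SELECT} 500']

This counts-then-totals grouping is also why the expected output changes in the DistributionMetric test below: the old render_item interleaved the :count and :total lines per label value, while the new render() lists all counts first and then all totals.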

@@ -35,8 +35,7 @@ class CounterMetricTestCase(unittest.TestCase):
             'scalar 1',
         ])
 
-        counter.inc()
-        counter.inc()
+        counter.inc_by(2)
 
         self.assertEquals(counter.render(), [
             'scalar 3'
@@ -125,8 +124,8 @@ class DistributionMetricTestCase(unittest.TestCase):
         self.assertEquals(metric.render(), [
             'queries:count{verb="INSERT"} 1',
-            'queries:total{verb="INSERT"} 800',
             'queries:count{verb="SELECT"} 2',
+            'queries:total{verb="INSERT"} 800',
             'queries:total{verb="SELECT"} 500',
         ])