I wrote a simple Python read plugin to collect a few stats from my
local MongoDB server.  I noticed that over time, the process would
continually grow until it consumed >100 MB of memory.  If I simply
comment out the 'v.dispatch()' calls, the memory leak goes away.  Is
this an issue with collectd, or with my code?  Also, is there an easier
way to differentiate between counters and gauges without adding custom
entries to types.db?  Here are a few relevant snippets of my code,
which run under collectd's read callback:

# imports used by these snippets
import collectd
import datetime
from bson.tz_util import utc    # any aware UTC tzinfo works here (e.g. pytz.utc)

def mongostats_getstats(data):
        # get the delta between each replication source's syncedTo timestamp
        # and now (only the last source's value is kept if there are several)
        for x in data['db'].sources.find():
                synced_to = x['syncedTo'].as_datetime()
                now = datetime.datetime.now(utc)
                delta = now - synced_to
                total_seconds = (delta.days * 86400) + delta.seconds

        # get the rest of the stats
        result = data['db'].command('serverStatus')

        # compile a dictionary of values
        values = {
                'replication_lag': total_seconds,
                'connections': result['connections']['current'],
                'page_faults': result['extra_info']['page_faults'],
                'last_flush_ms': result['backgroundFlushing']['last_ms'],
                'insert': result['opcountersRepl']['insert'] \
                          + result['opcounters']['insert'],
                'query': result['opcountersRepl']['query'] \
                          + result['opcounters']['query'],
                'update': result['opcountersRepl']['update'] \
                          + result['opcounters']['update'],
                'delete': result['opcountersRepl']['delete'] \
                          + result['opcounters']['delete'],
                'getmore': result['opcountersRepl']['getmore'] \
                          + result['opcounters']['getmore'],
                'command': result['opcountersRepl']['command'] \
                          + result['opcounters']['command']
        }

        return values

def mongostats_read(data=None):
        # get stats
        result = mongostats_getstats(data)

        # handle the gauge values first
        for x in ['replication_lag', 'connections', 'last_flush_ms']:
                v = collectd.Values()
                v.plugin = 'mongostats'
                v.type_instance = x
                v.type = 'gauge'
                #v.dispatch(values = [result[x]])

        # now handle the counters
        for x in ['page_faults', 'insert', 'query', \
                  'update', 'delete', 'getmore', 'command']:
                v = collectd.Values()
                v.plugin = 'mongostats'
                v.type_instance = x
                v.type = 'counter'
                #v.dispatch(values = [result[x]])

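For reference, the callbacks are registered roughly like this; the
connection setup below is simplified, but it shows where the 'db' entry
in the data dict comes from (old-style pymongo Connection here; newer
pymongo uses MongoClient):

import collectd
import pymongo

def mongostats_init():
        # open the connection once and pass the 'local' database to the
        # read callback through collectd's 'data' argument
        db = pymongo.Connection('localhost', 27017).local
        collectd.register_read(mongostats_read, data={'db': db})

collectd.register_init(mongostats_init)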