Hashar has uploaded a new change for review.
https://gerrit.wikimedia.org/r/265252
Change subject: Pass flake8 and add it to tox envlist
......................................................................
Pass flake8 and add it to tox envlist
flake8 runs the pep8 and pyflakes linters, which help catch a bunch of
issues.
Ignore a couple of whitespace-related errors for spacing that is used for alignment.
Instruct pep8 to ignore the 'import unittest' in the middle of the file
by suffixing the line with '# noqa'.
Fix all other whitespace issues.
Change-Id: I78ab5562da0fa3908b74e36c24cc13e74442e320
---
M README.md
M files/varnishkafka_ganglia.py
M tox.ini
3 files changed, 58 insertions(+), 39 deletions(-)
git pull ssh://gerrit.wikimedia.org:29418/operations/puppet/varnishkafka
refs/changes/52/265252/1
diff --git a/README.md b/README.md
index 3944678..772633d 100644
--- a/README.md
+++ b/README.md
@@ -36,3 +36,7 @@
## Testing
Run `tox` which setup appropriate virtualenvs and run commands for you.
+
+Python scripts should match the flake8 conventions, you can run them using:
+
+ tox -e flake8
diff --git a/files/varnishkafka_ganglia.py b/files/varnishkafka_ganglia.py
index 507a029..fee229b 100755
--- a/files/varnishkafka_ganglia.py
+++ b/files/varnishkafka_ganglia.py
@@ -22,7 +22,6 @@
logger = logging.getLogger('varnishkafka')
-
# metric keys to skip reporting to ganglia
skip_metrics = [
'app_offset',
@@ -95,7 +94,6 @@
return flattened
-
def tail(filename, n=2):
'''
Tails the last n lines from filename and returns them in a list.
@@ -164,7 +162,7 @@
self.stats_file = stats_file
self.key_separator = key_separator
- # NOTE: It might be more elegant to
+ # NOTE: It might be more elegant to
# store the JSON object as it comes back from stats_file,
# rather than keeping the state in the flattened hash.
@@ -187,7 +185,6 @@
key_separator.join(['kafka', 'varnishkafka', 'time']),
key_separator.join(['kafka', 'rdkafka', 'time']),
]
-
def key_filter(self, key):
'''
@@ -213,7 +210,6 @@
return False
return key
-
def is_counter_stat(self, key):
'''
@@ -328,6 +324,7 @@
last_run_timestamp = 0
key_prefix = ''
+
def metric_handler(name):
"""Get value of particular metric; part of Gmond interface"""
global varnishkafka_stats
@@ -338,7 +335,9 @@
name = name[len(key_prefix):]
seconds_since_last_run = time.time() - last_run_timestamp
if (seconds_since_last_run >= time_max):
- logger.debug('Updating varnishkafka_stats since it has been {0}
seconds, which is more than tmax of {1}'.format(seconds_since_last_run,
time_max))
+ logger.debug(
+ 'Updating varnishkafka_stats since it has been {0} seconds, which '
+ 'is more than tmax of {1}'.format(seconds_since_last_run,
time_max))
varnishkafka_stats.update_stats()
last_run_timestamp = time.time()
@@ -425,19 +424,17 @@
return descriptions
+
def metric_cleanup():
"""Teardown; part of Gmond interface"""
pass
-
-
-
-
-
# To run tests:
# python -m unittest varnishkafka_ganglia
-import unittest
+import unittest # noqa
+
+
class TestVarnishkafkaGanglia(unittest.TestCase):
def setUp(self):
self.key_separator = '&'
@@ -452,17 +449,17 @@
'value4': False,
}
},
- '2.1': ['a','b'],
- '2.1': ['a','b'],
+ '2.1': ['a', 'b'],
+ '2.1': ['a', 'b'],
# '/' should be replaced with key_separator
'3/1': 'nonya',
'notme': 'nope',
'kafka': {
'varnishkafka': {
'time': time.time(),
- 'counter': { self.varnishkafka_stats.counter_stats[0]: 0 },
+ 'counter': {self.varnishkafka_stats.counter_stats[0]: 0},
},
- 'rdkafka': { 'time': time.time() }
+ 'rdkafka': {'time': time.time()}
},
}
self.flattened_should_be = {
@@ -478,7 +475,6 @@
'kafka&varnishkafka&counter&{0}'.format(self.varnishkafka_stats.counter_stats[0]):
0,
'kafka&rdkafka&time': self.json_data['kafka']['rdkafka']['time'],
}
-
def key_filter_callback(self, key):
if key == 'value2':
@@ -517,15 +513,22 @@
self.flattened_should_be['kafka&varnishkafka&time'] += 100.0
self.varnishkafka_stats.update_stats(self.flattened_should_be)
-
self.assertEquals(self.varnishkafka_stats.flattened_stats_previous[counter_key],
previous_value)
-
self.assertEquals(self.varnishkafka_stats.flattened_stats[counter_key],
self.flattened_should_be[counter_key])
-
self.assertEquals(self.varnishkafka_stats.flattened_stats['kafka&varnishkafka&time'],
self.flattened_should_be['kafka&varnishkafka&time'])
+ self.assertEquals(
+ self.varnishkafka_stats.flattened_stats_previous[counter_key],
+ previous_value
+ )
+ self.assertEquals(
+ self.varnishkafka_stats.flattened_stats[counter_key],
+ self.flattened_should_be[counter_key]
+ )
+ self.assertEquals(
+ self.varnishkafka_stats.flattened_stats['kafka&varnishkafka&time'],
+ self.flattened_should_be['kafka&varnishkafka&time']
+ )
per_second_key = self.key_separator.join([counter_key,
self.varnishkafka_stats.per_second_key_suffix])
rate_should_be = (self.flattened_should_be[counter_key] -
self.varnishkafka_stats.flattened_stats_previous[counter_key]) / 100.0
self.assertEquals(self.varnishkafka_stats.flattened_stats[per_second_key],
rate_should_be)
-
-
def generate_pyconf(module_name, metric_descriptions, params={},
collect_every=15, time_threshold=15):
@@ -571,14 +574,12 @@
time_threshold = %(time_threshold)s
%(metrics_string)s
}
-""" % { 'module_name': module_name,
- 'params_string': params_string,
- 'collect_every': collect_every,
- 'time_threshold': time_threshold,
- 'metrics_string': metrics_string
- }
-
-
+""" % {'module_name': module_name,
+ 'params_string': params_string,
+ 'collect_every': collect_every,
+ 'time_threshold': time_threshold,
+ 'metrics_string': metrics_string
+ }
if __name__ == '__main__':
@@ -586,17 +587,22 @@
# metric descriptor and printing it out.
cmdline = optparse.OptionParser(usage="usage: %prog [options] statsfile")
- cmdline.add_option('--generate-pyconf', '-g', dest='pyconf',
metavar='FILE',
+ cmdline.add_option(
+ '--generate-pyconf', '-g', dest='pyconf', metavar='FILE',
help='If set, a .pyconf file will be output with flattened metrics key
from statsfile.')
- cmdline.add_option('--tmax', '-t', action='store', default=15,
+ cmdline.add_option(
+ '--tmax', '-t', action='store', default=15,
help='time_max for ganglia python module metrics.')
- cmdline.add_option('--key-separator', '-k', dest='key_separator',
default='.',
+ cmdline.add_option(
+ '--key-separator', '-k', dest='key_separator', default='.',
help='Key separator for flattened json object key name. Default: \'.\'
\'/\' is not allowed.')
- cmdline.add_option('--key-prefix', '-p', dest='key_prefix', default='',
+ cmdline.add_option(
+ '--key-prefix', '-p', dest='key_prefix', default='',
help='Optional key prefix for flattened json object key name.')
cmdline.add_option('--dry-run', action='store_true', default=False)
- cmdline.add_option('--debug', '-D', action='store_true', default=False,
- help='Provide more verbose logging for debugging.')
+ cmdline.add_option(
+ '--debug', '-D', action='store_true', default=False,
+ help='Provide more verbose logging for debugging.')
cli_options, arguments = cmdline.parse_args()
@@ -604,7 +610,7 @@
cmdline.print_help()
cmdline.error("Must supply statsfile argument.")
- cli_options.stats_file = arguments[0]
+ cli_options.stats_file = arguments[0]
# Turn the optparse.Value object into a regular dict
# so we can pass it to metric_init
@@ -653,7 +659,6 @@
print '\nWrote "%s".' % cli_options.pyconf
sys.exit(0)
-
# Else print out values of metrics in a loop.
else:
diff --git a/tox.ini b/tox.ini
index 3e5f89a..1def39d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,8 +1,18 @@
[tox]
# No setup.py
skipsdist = True
-envlist = py27
+envlist = flake8,py27
[testenv]
commands =
python -m unittest discover -v -s files -p *.py
+
+[testenv:flake8]
+deps = flake8
+commands = flake8 {posargs}
+
+[flake8]
+max-line-length = 136
+; E221 multiple spaces before operator
+; E241 multiple spaces after ':'
+ignore = E221, E241
--
To view, visit https://gerrit.wikimedia.org/r/265252
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: newchange
Gerrit-Change-Id: I78ab5562da0fa3908b74e36c24cc13e74442e320
Gerrit-PatchSet: 1
Gerrit-Project: operations/puppet/varnishkafka
Gerrit-Branch: master
Gerrit-Owner: Hashar <[email protected]>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits