From 3defc90ebad05852e5c903f809c6309987f88f0c Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Sun, 15 Jan 2017 14:51:47 +0100 Subject: [PATCH 01/41] Preparation for entry-points. This prepares the overall code structure for entry points including removal of the shell script. Managing the cron-script is not functional yet. Nothing has been tested either. Committing so I can sync with a box where I can test. --- munininfluxdb/commands/__init__.py | 0 munininfluxdb/commands/cron.py | 88 +++++++++++++++++++ {bin => munininfluxdb/commands}/fetch.py | 86 +++--------------- .../commands/import_.py | 47 ++++------ munininfluxdb/main.py | 32 +++++++ munininfluxdb/utils.py | 8 ++ setup.py | 6 +- 7 files changed, 163 insertions(+), 104 deletions(-) create mode 100644 munininfluxdb/commands/__init__.py create mode 100644 munininfluxdb/commands/cron.py rename {bin => munininfluxdb/commands}/fetch.py (56%) rename bin/import.py => munininfluxdb/commands/import_.py (86%) create mode 100644 munininfluxdb/main.py diff --git a/munininfluxdb/commands/__init__.py b/munininfluxdb/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/munininfluxdb/commands/cron.py b/munininfluxdb/commands/cron.py new file mode 100644 index 0000000..c14c280 --- /dev/null +++ b/munininfluxdb/commands/cron.py @@ -0,0 +1,88 @@ +from __future__ import print_function +import os +import pwd +import sys + +from munininfluxdb.utils import Symbol, absolute_executable + + +try: + pwd.getpwnam('munin') +except KeyError: + CRON_USER = 'root' +else: + CRON_USER = 'munin' + +# Cron job comment is used to uninstall and must not be manually deleted from the crontab +CRON_COMMENT = 'Update InfluxDB with fresh values from Munin' +NAME = 'cron' +DESCRIPTION = 'Installs or uninstalls the CRON job' + + +def uninstall_cron(): + if os.geteuid() != 0: + print("It seems you are not root, please run \"muninflux fetch --uninstall-cron\" again with root privileges") + sys.exit(1) + + try: + import crontab + except ImportError: + from vendor import crontab + + cron = crontab.CronTab(user=CRON_USER) + jobs = list(cron.find_comment(CRON_COMMENT)) + cron.remove(*jobs) + cron.write() + + return len(jobs) + + +def install_cron(script_file, period): + if os.geteuid() != 0: + print("It seems you are not root, please run \"muninflux fetch --install-cron\" again with root privileges") + sys.exit(1) + + try: + import crontab + except ImportError: + from vendor import crontab + + cron = crontab.CronTab(user=CRON_USER) + job = cron.new(command=script_file, user=CRON_USER, comment=CRON_COMMENT) + job.minute.every(period) + + if job.is_valid() and job.is_enabled(): + cron.write() + + return job.is_valid() and job.is_enabled() + + +def setup(parser): + parser.add_argument('script_path', + help='install a cron job to updated InfluxDB with fresh data from Munin every minutes') + parser.add_argument('-p', '--period', default=5, type=int, + help="sets the period in minutes between each fetch in the cron job (default: %(default)dmin)") + parser.add_argument('--uninstall-cron', action='store_true', + help='uninstall the fetch cron job (any matching the initial comment actually)') + parser.set_defaults(func=main) + + +def main(args): + print(absolute_executable()) + raise NotImplementedError('Not yet implemented as subcommand') + # TODO from pkg_resources import load_entry_point + # TODO entry = load_entry_point('munin-influxdb', 'console_scripts', 'muninflux_fetch') + # TODO # python bin/fetch.py --install-cron $(dirname $(readlink -f 
"$0"))/bin/fetch.py + + if args.script_path: + install_cron(args.script_path, args.period) + print("{0} Cron job installed for user {1}".format(Symbol.OK_GREEN, CRON_USER)) + return + elif args.uninstall_cron: + nb = uninstall_cron() + if nb: + print("{0} Cron job uninstalled for user {1} ({2} entries deleted)".format(Symbol.OK_GREEN, CRON_USER, nb)) + else: + print("No matching job found (searching comment \"{1}\" in crontab for user {2})".format(Symbol.WARN_YELLOW, + CRON_COMMENT, CRON_USER)) + return diff --git a/bin/fetch.py b/munininfluxdb/commands/fetch.py similarity index 56% rename from bin/fetch.py rename to munininfluxdb/commands/fetch.py index e5c4bec..f6f06ae 100755 --- a/bin/fetch.py +++ b/munininfluxdb/commands/fetch.py @@ -1,10 +1,8 @@ #!/usr/bin/env python from __future__ import print_function -import pwd import json import os import sys -import argparse from collections import defaultdict from munininfluxdb.utils import Symbol @@ -12,21 +10,17 @@ import influxdb +NAME = 'fetch' +DESCRIPTION = """'fetch' command grabs fresh data gathered by a still running Munin installation and send it to InfluxDB. + +Currently, Munin needs to be still running to update the data in '/var/lib/munin/state-*' files. +""" + try: import storable except ImportError: from vendor import storable -try: - pwd.getpwnam('munin') -except KeyError: - CRON_USER = 'root' -else: - CRON_USER = 'munin' - -# Cron job comment is used to uninstall and must not be manually deleted from the crontab -CRON_COMMENT = 'Update InfluxDB with fresh values from Munin' - def pack_values(config, values): suffix = ":{0}".format(Defaults.DEFAULT_RRD_INDEX) metrics, date = values @@ -66,7 +60,9 @@ def read_state_file(filename): assert 'spoolfetch' in data and 'value' in data return data['value'], data['spoolfetch'] -def main(config_filename=Defaults.FETCH_CONFIG): +def main(args): + config_filename = args.config or Defaults.FETCH_CONFIG + config = None with open(config_filename) as f: config = json.load(f) @@ -113,68 +109,8 @@ def main(config_filename=Defaults.FETCH_CONFIG): json.dump(config, f) print("{0} Updated configuration: {1}".format(Symbol.OK_GREEN, f.name)) -def uninstall_cron(): - if os.geteuid() != 0: - print("It seems you are not root, please run \"muninflux fetch --uninstall-cron\" again with root privileges".format(sys.argv[0])) - sys.exit(1) - - try: - import crontab - except ImportError: - from vendor import crontab - - cron = crontab.CronTab(user=CRON_USER) - jobs = list(cron.find_comment(CRON_COMMENT)) - cron.remove(*jobs) - cron.write() - - return len(jobs) -def install_cron(script_file, period): - if os.geteuid() != 0: - print("It seems you are not root, please run \"muninflux fetch --install-cron\" again with root privileges".format(sys.argv[0])) - sys.exit(1) - - try: - import crontab - except ImportError: - from vendor import crontab - - cron = crontab.CronTab(user=CRON_USER) - job = cron.new(command=script_file, user=CRON_USER, comment=CRON_COMMENT) - job.minute.every(period) - - if job.is_valid() and job.is_enabled(): - cron.write() - - return job.is_valid() and job.is_enabled() - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description=""" - 'fetch' command grabs fresh data gathered by a still running Munin installation and send it to InfluxDB. - - Currently, Munin needs to be still running to update the data in '/var/lib/munin/state-*' files. 
- """) +def setup(parser): parser.add_argument('--config', default=Defaults.FETCH_CONFIG, help='overrides the default configuration file (default: %(default)s)') - cronargs = parser.add_argument_group('cron job management') - cronargs.add_argument('--install-cron', dest='script_path', - help='install a cron job to updated InfluxDB with fresh data from Munin every minutes') - cronargs.add_argument('-p', '--period', default=5, type=int, - help="sets the period in minutes between each fetch in the cron job (default: %(default)min)") - cronargs.add_argument('--uninstall-cron', action='store_true', - help='uninstall the fetch cron job (any matching the initial comment actually)') - args = parser.parse_args() - - if args.script_path: - install_cron(args.script_path, args.period) - print("{0} Cron job installed for user {1}".format(Symbol.OK_GREEN, CRON_USER)) - elif args.uninstall_cron: - nb = uninstall_cron() - if nb: - print("{0} Cron job uninstalled for user {1} ({2} entries deleted)".format(Symbol.OK_GREEN, CRON_USER, nb)) - else: - print("No matching job found (searching comment \"{1}\" in crontab for user {2})".format(Symbol.WARN_YELLOW, - CRON_COMMENT, CRON_USER)) - else: - main(args.config) + parser.set_defaults(func=main) diff --git a/bin/import.py b/munininfluxdb/commands/import_.py similarity index 86% rename from bin/import.py rename to munininfluxdb/commands/import_.py index 8ad36ed..1bd2c51 100755 --- a/bin/import.py +++ b/munininfluxdb/commands/import_.py @@ -1,15 +1,27 @@ -#!/usr/bin/env python from __future__ import print_function - -import argparse -import sys +import logging from munininfluxdb import munin from munininfluxdb import rrd from munininfluxdb.settings import Settings, Defaults from munininfluxdb.influxdbclient import InfluxdbClient from munininfluxdb.grafana import Dashboard -from munininfluxdb.utils import Color, Symbol +from munininfluxdb.utils import Color, Symbol, prompt + + +LOG = logging.getLogger(__name__) +NAME = 'import' +DESCRIPTION = """'import' command is a conversion tool from Munin to InfluxDB + Grafana + +It reads Munin's configuration files, parses the RRD folder (WWW cache folder if needed) to extract the structure +of your current Munin setup. Data, with full history, is converted to InfluxDB time series format and uploaded to +a InfluxDB server. You then have the possibility to generate a Grafana dashboard (JSON file to be imported manually) +taking advantage of the features of Grafana with you current Munin configuration and plugins. + +After 'import' is completed, a cron job is installed to run the 'fetch' command every 5 minutes (default Munin analysis +period). This updates the InfluxDB series with fresh data from Munin. +See 'fetch -h' for details. +""" def retrieve_munin_configuration(settings): @@ -108,19 +120,7 @@ def main(args): print("Then we're good! Have a nice day!") -if __name__ == "__main__": - parser = argparse.ArgumentParser(description=""" - 'import' command is a conversion tool from Munin to InfluxDB + Grafana - - It reads Munin's configuration files, parses the RRD folder (WWW cache folder if needed) to extract the structure - of your current Munin setup. Data, with full history, is converted to InfluxDB time series format and uploaded to - a InfluxDB server. You then have the possibility to generate a Grafana dashboard (JSON file to be imported manually) - taking advantage of the features of Grafana with you current Munin configuration and plugins. 
- - After 'import' is completed, a cron job is installed to run the 'fetch' command every 5 minutes (default Munin analysis - period). This updates the InfluxDB series with fresh data from Munin. - See 'fetch -h' for details. - """) +def setup(parser): parser.add_argument('--interactive', dest='interactive', action='store_true') parser.add_argument('--no-interactive', dest='interactive', action='store_false') parser.set_defaults(interactive=True) @@ -168,13 +168,4 @@ def main(args): grafanargs.add_argument('--grafana-cols', default=2, type=int, help='number of panel per row') grafanargs.add_argument('--grafana-tags', nargs='+', help='grafana dashboard tags') - args = parser.parse_args() - - try: - main(args) - except KeyboardInterrupt: - print("\n{0} Canceled.".format(Symbol.NOK_RED)) - sys.exit(1) - except Exception as e: - print("{0} Error: {1}".format(Symbol.NOK_RED, e)) - sys.exit(1) + parser.set_defaults(func=main) diff --git a/munininfluxdb/main.py b/munininfluxdb/main.py new file mode 100644 index 0000000..feab366 --- /dev/null +++ b/munininfluxdb/main.py @@ -0,0 +1,32 @@ +from __future__ import print_function +from argparse import ArgumentParser +import sys + +from munininfluxdb.utils import Symbol +import munininfluxdb.commands.cron as cmd_cron +import munininfluxdb.commands.fetch as cmd_fetch +import munininfluxdb.commands.import_ as cmd_import + + +def main(): + parser = ArgumentParser(description='TODO') # TODO + subparsers = parser.add_subparsers( + title='subcommands', + description='valid subcommands', + help='additional help' + ) + + for subcommand in (cmd_import, cmd_fetch, cmd_cron): + subparser = subparsers.add_parser(subcommand.NAME, + description=subcommand.DESCRIPTION) + subcommand.setup(subparser) + + args = parser.parse_args() + try: + args.func(args) + except KeyboardInterrupt: + print("\n{0} Canceled.".format(Symbol.NOK_RED)) + sys.exit(1) + except Exception as e: + print("{0} Error: {1}".format(Symbol.NOK_RED, e)) + sys.exit(1) diff --git a/munininfluxdb/utils.py b/munininfluxdb/utils.py index 3d2cde8..9bf3f08 100644 --- a/munininfluxdb/utils.py +++ b/munininfluxdb/utils.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- from __future__ import print_function +from os.path import join, basename import sys @@ -48,6 +49,13 @@ def show(self): print("") +prompt = input if sys.version_info >= (3, 0, 0) else raw_input + + +def absolute_executable(): + return join(sys.prefix, 'bin', basename(sys.argv[0])) + + def parse_handle(handle): """ Parses a connection handle to get it's subparts (user, password, host, port, dbname) diff --git a/setup.py b/setup.py index 8d0888b..e11e43d 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,11 @@ url='http://github.com/mvonthron/munin-influxdb', license='BSD', py_modules=['munininfluxdb'], - scripts=['muninflux'], + entry_points={ + 'console_scripts': [ + 'muninflux = munininfluxdb.main:main', + ] + }, install_requires=['influxdb>=2.12.0', 'requests'], packages=find_packages(), classifiers=[ From 5070576191e962efce5694215264dc09ae4bee40 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Fri, 20 Jan 2017 10:36:06 +0100 Subject: [PATCH 02/41] CRON subcommand added. Passing script-path is also no longer required. 
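
The command installed in the crontab is now derived from the running
entry point instead of a user-supplied script path. A rough sketch of
the idea, based on the absolute_executable() helper added to utils.py
in the previous commit (simplified for illustration):

    from os.path import basename, join
    import sys

    def absolute_executable():
        # Resolve the installed console script for the current environment,
        # e.g. /usr/local/bin/muninflux or <venv>/bin/muninflux.
        return join(sys.prefix, 'bin', basename(sys.argv[0]))

    # install_cron() then writes this command into the crontab:
    cmd = '%s fetch' % absolute_executable()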
--- munininfluxdb/commands/cron.py | 60 +++++++++++++++------------------- 1 file changed, 26 insertions(+), 34 deletions(-) diff --git a/munininfluxdb/commands/cron.py b/munininfluxdb/commands/cron.py index c14c280..dae7452 100644 --- a/munininfluxdb/commands/cron.py +++ b/munininfluxdb/commands/cron.py @@ -19,7 +19,7 @@ DESCRIPTION = 'Installs or uninstalls the CRON job' -def uninstall_cron(): +def uninstall_cron(args): if os.geteuid() != 0: print("It seems you are not root, please run \"muninflux fetch --uninstall-cron\" again with root privileges") sys.exit(1) @@ -34,12 +34,20 @@ def uninstall_cron(): cron.remove(*jobs) cron.write() - return len(jobs) + nb = len(jobs) + if nb: + print("{0} Cron job uninstalled for user {1} ({2} entries deleted)".format(Symbol.OK_GREEN, CRON_USER, nb)) + else: + print("No matching job found (searching comment \"{1}\" in crontab for user {2})".format(Symbol.WARN_YELLOW, + CRON_COMMENT, CRON_USER)) -def install_cron(script_file, period): +def install_cron(args): + script_path = absolute_executable() + cmd = '%s fetch' % script_path + if os.geteuid() != 0: - print("It seems you are not root, please run \"muninflux fetch --install-cron\" again with root privileges") + print("It seems you are not root, please run \"%s cron install\" again with root privileges") sys.exit(1) try: @@ -48,41 +56,25 @@ def install_cron(script_file, period): from vendor import crontab cron = crontab.CronTab(user=CRON_USER) - job = cron.new(command=script_file, user=CRON_USER, comment=CRON_COMMENT) - job.minute.every(period) + job = cron.new(command=cmd, user=CRON_USER, comment=CRON_COMMENT) + job.minute.every(args.period) if job.is_valid() and job.is_enabled(): cron.write() + print("{0} Cron job installed for user {1}".format(Symbol.OK_GREEN, CRON_USER)) return job.is_valid() and job.is_enabled() def setup(parser): - parser.add_argument('script_path', - help='install a cron job to updated InfluxDB with fresh data from Munin every minutes') - parser.add_argument('-p', '--period', default=5, type=int, - help="sets the period in minutes between each fetch in the cron job (default: %(default)dmin)") - parser.add_argument('--uninstall-cron', action='store_true', - help='uninstall the fetch cron job (any matching the initial comment actually)') - parser.set_defaults(func=main) - - -def main(args): - print(absolute_executable()) - raise NotImplementedError('Not yet implemented as subcommand') - # TODO from pkg_resources import load_entry_point - # TODO entry = load_entry_point('munin-influxdb', 'console_scripts', 'muninflux_fetch') - # TODO # python bin/fetch.py --install-cron $(dirname $(readlink -f "$0"))/bin/fetch.py - - if args.script_path: - install_cron(args.script_path, args.period) - print("{0} Cron job installed for user {1}".format(Symbol.OK_GREEN, CRON_USER)) - return - elif args.uninstall_cron: - nb = uninstall_cron() - if nb: - print("{0} Cron job uninstalled for user {1} ({2} entries deleted)".format(Symbol.OK_GREEN, CRON_USER, nb)) - else: - print("No matching job found (searching comment \"{1}\" in crontab for user {2})".format(Symbol.WARN_YELLOW, - CRON_COMMENT, CRON_USER)) - return + subparsers = parser.add_subparsers(title='CRON commands') + install_parser = subparsers.add_parser( + 'install', description='Installs the CRON job') + uninstall_parser = subparsers.add_parser( + 'uninstall', description='Uninstalls the CRON job') + + install_parser.add_argument( + '-p', '--period', default=5, type=int, + help="sets the period in minutes between each fetch in the cron job 
(default: %(default)dmin)")
+    install_parser.set_defaults(func=install_cron)
+    uninstall_parser.set_defaults(func=uninstall_cron)

From 23ec84a4a39274ce0fc93d89128ea009a26299eb Mon Sep 17 00:00:00 2001
From: Michel Albert
Date: Fri, 20 Jan 2017 10:38:08 +0100
Subject: [PATCH 03/41] Added "crontab" to the dependencies.

No longer need the "vendor" copy
---
 munininfluxdb/commands/cron.py |   12 +-
 setup.py                       |    6 +-
 vendor/crontab.py              | 1023 --------------------------------
 3 files changed, 7 insertions(+), 1034 deletions(-)
 delete mode 100644 vendor/crontab.py

diff --git a/munininfluxdb/commands/cron.py b/munininfluxdb/commands/cron.py
index dae7452..735ddc5 100644
--- a/munininfluxdb/commands/cron.py
+++ b/munininfluxdb/commands/cron.py
@@ -3,6 +3,8 @@
 import pwd
 import sys
 
+import crontab
+
 from munininfluxdb.utils import Symbol, absolute_executable
 
 
@@ -24,11 +26,6 @@ def uninstall_cron(args):
     if os.geteuid() != 0:
         print("It seems you are not root, please run \"muninflux fetch --uninstall-cron\" again with root privileges")
         sys.exit(1)
 
-    try:
-        import crontab
-    except ImportError:
-        from vendor import crontab
-
     cron = crontab.CronTab(user=CRON_USER)
     jobs = list(cron.find_comment(CRON_COMMENT))
     cron.remove(*jobs)
     cron.write()
@@ -50,11 +47,6 @@ def install_cron(args):
         print("It seems you are not root, please run \"%s cron install\" again with root privileges")
         sys.exit(1)
 
-    try:
-        import crontab
-    except ImportError:
-        from vendor import crontab
-
     cron = crontab.CronTab(user=CRON_USER)
     job = cron.new(command=cmd, user=CRON_USER, comment=CRON_COMMENT)
     job.minute.every(args.period)
diff --git a/setup.py b/setup.py
index e11e43d..e31a483 100644
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,11 @@
             'muninflux = munininfluxdb.main:main',
         ]
     },
-    install_requires=['influxdb>=2.12.0', 'requests'],
+    install_requires=[
+        'influxdb>=2.12.0',
+        'requests',
+        'python-crontab>=2.1.1'
+    ],
     packages=find_packages(),
     classifiers=[
         'Development Status :: 4 - Beta',
diff --git a/vendor/crontab.py b/vendor/crontab.py
deleted file mode 100644
index c47ddf4..0000000
--- a/vendor/crontab.py
+++ /dev/null
@@ -1,1023 +0,0 @@
-#
-# Copyright 2014, Martin Owens
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see .
-#
-# Ideas from gnome-schedule: Philip Van Hoof, Gaute Hope, Kristof Vansant
-#
-# REQUEST: Please do NOT simply copy and paste this code into your own
-#          projects. Please package this module for your distribution and
-#          use as a direct dependancy. 
-# -""" -from crontab import CronTab -import sys - -# Create a new non-installed crontab -cron = CronTab(tab='') -job = cron.new(command='/usr/bin/echo') - -job.minute.during(5,50).every(5) -job.hour.every(4) - -job.dow.on('SUN') -job.month.during('APR', 'JUN') -job.month.also.during('OCT', 'DEC') - -job.every(2).days() -job.setall(1, 12, None, None, None) - -job2 = cron.new(command='/foo/bar', comment='SomeID') -job2.every_reboot() - -jobs = list(cron.find_command('bar')) -job3 = jobs[0] -job3.clear() -job3.minute.every(1) - -sys.stdout.write(str(cron.render())) - -job3.enable(False) - -for job4 in cron.find_command('echo'): - sys.stdout.write(job4) - -for job5 in cron.find_comment('SomeID'): - sys.stdout.write(job5) - -for job6 in cron: - sys.stdout.write(job6) - -for job7 in cron: - job7.every(3).hours() - sys.stdout.write(job7) - job7.every().dow() - -cron.remove_all(command='/foo/bar') -cron.remove_all(comment='This command') -cron.remove_all(time='* * * * *') -cron.remove_all() - -output = cron.render() - -cron.write() - -cron.write(filename='/tmp/output.txt') - -#cron.write_to_user(user=True) - -#cron.write_to_user(user='root') - -# Croniter Extentions allow you to ask for the scheduled job times, make -# sure you have croniter installed, it's not a hard dependancy. - -job3.schedule().get_next() -job3.schedule().get_prev() - -""" - -import os -import re -import sys - -import codecs -import tempfile -import subprocess as sp - -from datetime import date, datetime - -__pkgname__ = 'python-crontab' -__version__ = '1.9.0' - -ITEMREX = re.compile(r'^\s*([^@#\s]+)\s+([^@#\s]+)\s+([^@#\s]+)\s+([^@#\s]+)' - r'\s+([^@#\s]+)\s+([^#\n]*)(\s+#\s*([^\n]*)|$)') -SPECREX = re.compile(r'^\s*@(\w+)\s([^#\n]*)(\s+#\s*([^\n]*)|$)') -DEVNULL = ">/dev/null 2>&1" - -WEEK_ENUM = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] - -MONTH_ENUM = [None, 'jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', - 'sep', 'oct', 'nov', 'dec'] - -SPECIALS = {"reboot": '@reboot', - "hourly": '0 * * * *', - "daily": '0 0 * * *', - "weekly": '0 0 * * 0', - "monthly": '0 0 1 * *', - "yearly": '0 0 1 1 *', - "annually": '0 0 1 1 *', - "midnight": '0 0 * * *'} - -SPECIAL_IGNORE = ['midnight', 'annually'] - -S_INFO = [ - {'name': 'Minutes', 'max': 59, 'min': 0}, - {'name': 'Hours', 'max': 23, 'min': 0}, - {'name': 'Day of Month', 'max': 31, 'min': 1}, - {'name': 'Month', 'max': 12, 'min': 1, 'enum': MONTH_ENUM}, - {'name': 'Day of Week', 'max': 6, 'min': 0, 'enum': WEEK_ENUM}, -] - -# Detect Python3 and which OS for temperments. -import platform -PY3 = platform.python_version()[0] == '3' -WINOS = platform.system() == 'Windows' -SYSTEMV = not WINOS and os.uname()[0] in ["SunOS", "AIX", "HP-UX"] -SYSTEMV = SYSTEMV or os.getenv('SYSTEMV_TEST') - -TESTING = False -CRONCMD = "/usr/bin/crontab" - -try: - import pwd -except ImportError: - pwd = None - -if PY3: - # pylint: disable=W0622 - unicode = str - basestring = str - -try: - # Croniter is an optional import - from croniter.croniter import croniter - - class Croniter(croniter): - """Same as normal croniter, but always return datetime objects""" - def get_next(self, type_ref=datetime): - return croniter.get_next(self, type_ref) - - def get_prev(self, type_ref=datetime): - return croniter.get_prev(self, type_ref) - - def get_current(self, type_ref=datetime): - return croniter.get_current(self, type_ref) -except ImportError: - Croniter = None - - -def pipeOpen(cmd, *args, **flags): - """Runs a program and orders the arguments for compatability. - - a. 
keyword args are flags and always appear /before/ arguments for bsd - """ - l = (cmd,) - for (k,v) in flags.items(): - if v is not None: - l += len(k)==1 and ("-%s" % (k,), str(v)) or ("--%s=%s" % (k,v),) - l += tuple(args) - return sp.Popen(tuple(a for a in l if a), stdout=sp.PIPE, stderr=sp.PIPE) - - -class CronTab(object): - """ - Crontab object which can access any time based cron using the standard. - - user - Set the user of the crontab (default: None) - * 'user' = Load from $username's crontab (instead of tab or tabfile) - * None = Don't load anything from any user crontab. - * True = Load from current $USER's crontab (unix only) - * False = This is a system crontab, each command has a username - - tab - Use a string variable as the crontab instead of installed crontab - tabfile - Use a file for the crontab instead of installed crontab - log - Filename for logfile instead of /var/log/syslog - - """ - def __init__(self, user=None, tab=None, tabfile=None, log=None): - self.lines = None - self.crons = None - self.filen = None - # Protect windows users - self.root = not WINOS and os.getuid() == 0 - # Storing user flag / username - self._user = user - # Load string or filename as inital crontab - self.intab = tab - self.read(tabfile) - self._log = log - - @property - def log(self): - """Returns the CronLog object for this tab (user or root tab only)""" - from cronlog import CronLog - if self._log is None or isinstance(self._log, basestring): - self._log = CronLog(self._log, user=self.user or 'root') - return self._log - - @property - def user(self): - if self._user is True and pwd: - return pwd.getpwuid(os.getuid())[0] - return self._user - - def read(self, filename=None): - """ - Read in the crontab from the system into the object, called - automatically when listing or using the object. use for refresh. - """ - self.crons = [] - self.lines = [] - lines = [] - if self.intab is not None: - lines = self.intab.split('\n') - elif filename: - self.filen = filename - with codecs.open(filename, 'r', encoding='utf-8') as fhl: - lines = fhl.readlines() - elif self.user: - (out, err) = pipeOpen(CRONCMD, l='', u=self.user).communicate() - if err and 'no crontab for' in str(err): - pass - elif err: - raise IOError("Read crontab %s: %s" % (self.user, err)) - lines = out.decode('utf-8').split("\n") - for line in lines: - cron = CronItem(line, cron=self) - if cron.is_valid(): - self.crons.append(cron) - self.lines.append(cron) - else: - self.lines.append(line.replace('\n', '')) - - def write(self, filename=None): - """Write the crontab to it's source or a given filename.""" - if filename: - self.filen = filename - - # Add to either the crontab or the internal tab. 
- if self.intab is not None: - self.intab = self.render() - # And that's it if we never saved to a file - if not self.filen: - return - - if self.filen: - fileh = open(self.filen, 'wb') - else: - filed, path = tempfile.mkstemp() - fileh = os.fdopen(filed, 'wb') - - fileh.write(self.render().encode('utf-8')) - fileh.close() - - if not self.filen: - # Add the entire crontab back to the user crontab - pipeOpen(CRONCMD, path, u=self.user).wait() - os.unlink(path) - - def write_to_user(self, user=None): - """Write the crontab to a user (or root) instead of a file.""" - self.filen = None - self.intab = None - if user is not None: - self.user = user - self.write() - - def render(self): - """Render this crontab as it would be in the crontab.""" - crons = [] - for cron in self.lines: - crons.append(unicode(cron)) - result = u'\n'.join(crons) - if result and result[-1] not in (u'\n', u'\r'): - result += u'\n' - return result - - def new(self, command='', comment='', user=None): - """ - Create a new cron with a command and comment. - - Returns the new CronItem object. - """ - if not user and self.user is False: - raise ValueError("User is required for system crontabs.") - item = CronItem(command=command, comment=comment, user=user, cron=self) - self.crons.append(item) - self.lines.append(item) - return item - - def find_command(self, command): - """Return an iter of jobs matching any part of the command.""" - for job in self.crons: - if command in job.command: - yield job - - def find_comment(self, comment): - """Return an iter of jobs that match the comment field exactly.""" - for job in self.crons: - if job.comment == comment: - yield job - - def find_time(self, *args): - """Return an iter of jobs that match this time pattern""" - for job in self.crons: - if job.slices == CronSlices(*args): - yield job - - @property - def commands(self): - """Return a generator of all unqiue commands used in this crontab""" - returned = [] - for cron in self.crons: - if cron.command not in returned: - yield cron.command - returned.append(cron.command) - - @property - def comments(self): - """Return a generator of all unique comments/Id used in this crontab""" - returned = [] - for cron in self.crons: - if cron.comment and cron.comment not in returned: - yield cron.comment - returned.append(cron.comment) - - def remove_all(self, command=None, comment=None, time=None): - """Removes all crons using the stated command OR that have the - stated comment OR removes everything if no arguments specified.""" - if command: - return self.remove(*self.find_command(command)) - elif comment: - return self.remove(*self.find_comment(comment)) - elif time: - return self.remove(*self.find_time(time)) - return self.remove(*self.crons[:]) - - def remove(self, *items): - """Remove a selected cron from the crontab.""" - result = 0 - for item in items: - result += self._remove(item) - return result - - def _remove(self, item): - """Internal removal of an item""" - # The last item often has a trailing line feed - if self.crons[-1] == item and self.lines[-1] == '': - self.lines.remove(self.lines[-1]) - self.crons.remove(item) - self.lines.remove(item) - return 1 - - def __iter__(self): - return self.crons.__iter__() - - def __getitem__(self, i): - return self.crons[i] - - def __unicode__(self): - return self.render() - - def __len__(self): - return len(self.crons) - - def __str__(self): - return self.render() - - -class CronItem(object): - """ - An item which objectifies a single line of a crontab and - May be considered to be a cron job 
object. - """ - def __init__(self, line=None, command='', comment='', user=None, cron=None): - self.cron = cron - self.user = user - self.valid = False - self.enabled = True - self.special = False - self.comment = comment - self.command = None - - self._log = None - - # Initalise five cron slices using static info. - self.slices = CronSlices() - - if line and line.strip(): - self.parse(line.strip()) - - elif command: - self.set_command(command) - self.valid = True - - def delete(self): - """Delete this item and remove it from it's parent""" - if not self.cron: - sys.stderr.write("Cron item is not associated with a crontab!\n") - else: - self.cron.remove(self) - - def set_command(self, cmd): - """Set the command and filter as needed""" - self.command = cmd.strip() - - def set_comment(self, cmt): - """Set the comment and don't filter""" - self.comment = cmt - - def parse(self, line): - """Parse a cron line string and save the info as the objects.""" - if type(line) is str and not PY3: - line = unicode(line, 'utf-8') - if not line or line[0] == '#': - self.enabled = False - line = line[1:].strip() - self._set_parse(ITEMREX.findall(line)) - self._set_parse(SPECREX.findall(line)) - - def _set_parse(self, result): - if not result: - return - if self.cron.user == False: - # Special flag to look for per-command user - (user, cmd) = result[0][-3].split(' ', 1) - self.set_command(cmd) - self.user = user - else: - self.set_command(result[0][-3]) - - self.valid = self.setall(*result[0][:-3]) - self.comment = result[0][-1] - self.enabled = self.enabled and self.valid - - def enable(self, enabled=True): - """Set if this cron job is enabled or not""" - if enabled in [True, False]: - self.enabled = enabled - return self.enabled - - def is_enabled(self): - """Return true if this job is enabled (not commented out)""" - return self.enabled - - def is_valid(self): - """Return true if this job is valid""" - return self.valid - - def render(self): - """Render this set cron-job to a string""" - if type(self.command) is str and not PY3: - self.command = unicode(self.command, 'utf-8') - user = '' - if self.cron.user is False: - if not self.user: - raise ValueError("Job to system-cron format, no user set!") - user = self.user + ' ' - result = u"%s %s%s" % (str(self.slices), user, self.command) - if self.comment: - if type(self.comment) is str and not PY3: - self.comment = unicode(self.comment, 'utf-8') - result += u" # " + self.comment - if not self.enabled: - result = u"# " + result - return result - - def every_reboot(self): - """Set to every reboot instead of a time pattern: @reboot""" - self.clear() - return self.slices.setall('@reboot') - - def every(self, unit=1): - """ - Replace existing time pattern with a single unit, setting all lower - units to first value in valid range. 
- - For instance job.every(3).days() will be `0 0 */3 * *` - while job.day().every(3) would be `* * */3 * *` - - Many of these patterns exist as special tokens on Linux, such as - `@midnight` and `@hourly` - """ - return ItemEveryInterface(self.slices, unit) - - def setall(self, *args): - """Replace existing time pattern with these five values given as args: - - job.setall("1 2 * * *") - job.setall(1, 2) == '1 2 * * *' - job.setall(0, 0, None, '>', 'SUN') == '0 0 * 12 SUN' - """ - return self.slices.setall(*args) - - def clear(self): - """Clear the special and set values""" - return self.slices.clear() - - def frequency(self, year=None): - """Returns the number of times this item will execute in a given year - (defaults to this year) - """ - return self.slices.frequency(year=year) - - def frequency_per_year(self, year=None): - """Returns the number of /days/ this item will execute on in a year - (defaults to this year) - """ - return self.slices.frequency_per_year(year=year) - - def frequency_per_day(self): - """Returns the number of time this item will execute in any day""" - return self.slices.frequency_per_day() - - def schedule(self, date_from=None): - """Return a croniter schedule is available.""" - if not date_from: - date_from = datetime.now() - if Croniter: - return Croniter(self.slices.clean_render(), date_from) - raise ImportError("Croniter is not available. Please install croniter" - " python module via pip or your package manager") - - @property - def log(self): - """Return a cron log specific for this job only""" - if not self._log and self.cron: - self._log = self.cron.log.for_program(self.command) - return self._log - - @property - def minute(self): - """Return the minute slice""" - return self.slices[0] - - @property - def minutes(self): - """Same as minute""" - return self.minute - - @property - def hour(self): - """Return the hour slice""" - return self.slices[1] - - @property - def hours(self): - """Same as hour""" - return self.hour - - @property - def day(self): - return self.dom - - @property - def dom(self): - """Return the day-of-the month slice""" - return self.slices[2] - - @property - def month(self): - """Return the month slice""" - return self.slices[3] - - @property - def months(self): - """Same as month""" - return self.month - - @property - def dow(self): - """Return the day of the week slice""" - return self.slices[4] - - def __repr__(self): - return "" % str(self) - - def __len__(self): - return len(str(self)) - - def __getitem__(self, x): - return self.slices[x] - - def __lt__(self, value): - return self.frequency() < CronSlices(value).frequency() - - def __gt__(self, value): - return self.frequency() > CronSlices(value).frequency() - - def __str__(self): - return self.__unicode__() - - def __unicode__(self): - if not self.is_valid(): - sys.stderr.write("Ignoring invalid crontab line\n") - return "# " + unicode(self.render()) - return self.render() - - -class ItemEveryInterface(object): - """Provide an interface to the job.every() method: - Available Calls: - minute, minutes, hour, hours, dom, doms, month, months, dow, dows - - Once run all units will be cleared (set to *) then proceeding units - will be set to '0' and the target unit will be set as every x units. 
- """ - def __init__(self, item, units): - self.slices = item - self.unit = units - for (x, i) in enumerate(['minute', 'hour', 'dom', 'month', 'dow', - 'min', 'hour', 'day', 'moon', 'weekday']): - setattr(self, i, self._set(x % 5)) - setattr(self, i+'s', self._set(x % 5)) - - def _set(self, target): - def innercall(): - """Returned inner call for setting slice targets""" - self.slices.clear() - # Day-of-week is actually a level 2 set, not level 4. - for p in range(target == 4 and 2 or target): - self.slices[p].on('<') - self.slices[target].every(self.unit) - return innercall - - def year(self): - """Special every year target""" - if self.unit > 1: - raise ValueError("Invalid value '%s', outside 1 year" % self.unit) - self.slices.setall('@yearly') - - -class CronSlices(list): - """Controls a list of five time 'slices' which reprisent: - minute frequency, hour frequency, day of month frequency, - month requency and finally day of the week frequency. - """ - def __init__(self, *args): - for info in S_INFO: - self.append(CronSlice(info)) - self.special = None - if args and not self.setall(*args): - raise ValueError("Can't set cron value to: %s" % str(args)) - - def setall(self, to, *slices): - """Parses the various ways date/time frequency can be specified""" - self.clear() - if isinstance(to, CronItem): - slices = to.slices - elif isinstance(to, list): - slices = to - elif slices: - slices = (to,) + slices - elif isinstance(to, basestring) and to: - if to.count(' ') == 4: - slices = to.strip().split(' ') - elif to.strip()[0] == '@': - to = to[1:] - else: - slices = [to] - - if to == 'reboot': - self.special = '@reboot' - return True - elif to in SPECIALS.keys(): - return self.setall(SPECIALS[to]) - - if id(slices) == id(self): - raise ValueError("Can not set cron to itself!") - - for a, b in zip(self, slices): - try: - a.parse(b) - except ValueError as error: - if not TESTING: - sys.stderr.write("WARNING: %s\n" % str(error)) - return False - except Exception: - return False - return True - - def clean_render(self): - """Return just numbered parts of this crontab""" - return ' '.join([unicode(s) for s in self]) - - def render(self): - "Return just the first part of a cron job (the numbers or special)" - time = self.clean_render() - if self.special: - return self.special - elif not SYSTEMV: - for (name, value) in SPECIALS.items(): - if value == time and name not in SPECIAL_IGNORE: - return "@%s" % name - return time - - def clear(self): - """Clear the special and set values""" - self.special = None - for s in self: - s.clear() - - def frequency(self, year=None): - return self.frequency_per_year(year=year) * self.frequency_per_day() - - def frequency_per_year(self, year=None): - result = 0 - if not year: - year = date.today().year - - weekdays = list(self[4]) - - for month in self[3]: - for day in self[2]: - try: - if date(year, month, day).weekday() in weekdays: - result += 1 - except ValueError: - continue - return result - - def frequency_per_day(self): - """Returns the number of time this item will execute in any day""" - return len(self[0]) * len(self[1]) - - def __str__(self): - return self.render() - - def __eq__(self, arg): - return self.render() == CronSlices(arg).render() - - -class SundayError(KeyError): - pass - - -class CronSlice(object): - """Cron slice object which shows a time pattern""" - def __init__(self, info, value=None): - self.min = info.get('min', None) - self.max = info.get('max', None) - self.name = info.get('name', None) - self.enum = info.get('enum', None) - 
self.parts = [] - if value: - self.parse(value) - - def parse(self, value): - """Set values into the slice.""" - self.parts = [] - if value is None: - return self.clear() - for part in str(value).split(','): - if part.find("/") > 0 or part.find("-") > 0 or part == '*': - self.parts.append(self.get_range(part)) - else: - try: - self.parts.append(self._v(part)) - except SundayError: - self.parts.append(0) - except ValueError as err: - raise ValueError('%s:%s/%s' % (str(err), self.name, part)) - - def render(self, resolve=False): - """Return the slice rendered as a crontab. - - resolve - return integer values instead of enums (default False) - - """ - if len(self.parts) == 0: - return '*' - return _render_values(self.parts, ',', resolve) - - def __repr__(self): - return "" % str(self) - - def __eq__(self, value): - return str(self) == str(value) - - def __str__(self): - return self.__unicode__() - - def __unicode__(self): - return self.render() - - def every(self, n_value, also=False): - """Set the every X units value""" - if not also: - self.clear() - self.parts.append(self.get_range(int(n_value))) - return self.parts[-1] - - def on(self, *n_value, **opts): - """Set the time values to the specified placements.""" - if not opts.get('also', False): - self.clear() - for av in n_value: - try: - self.parts += self._v(av), - except SundayError: - self.parts += 0, - return self.parts - - def during(self, vfrom, vto, also=False): - """Set the During value, which sets a range""" - if not also: - self.clear() - self.parts.append(self.get_range(self._v(vfrom), self._v(vto))) - return self.parts[-1] - - @property - def also(self): - """Appends rather than replaces the new values""" - outself = self - - class Also(object): - """Will append new values""" - def every(self, *a): - """Also every one of these""" - return outself.every(*a, also=True) - - def on(self, *a): - """Also on these""" - return outself.on(*a, also=True) - - def during(self, *a): - """Also during these""" - return outself.during(*a, also=True) - return Also() - - def clear(self): - """clear the slice ready for new vaues""" - self.parts = [] - - def get_range(self, *vrange): - """Return a cron range for this slice""" - return CronRange(self, *vrange) - - def __iter__(self): - """Return the entire element as an iterable""" - r = {} - # An empty part means '*' which is every(1) - if not self.parts: - self.every(1) - for part in self.parts: - if isinstance(part, CronRange): - for bit in part.range(): - r[bit] = 1 - else: - r[int(part)] = 1 - for x in r: - yield x - - def __len__(self): - """Returns the number of times this slice happens in it's range""" - return len(list(self.__iter__())) - - def _v(self, v): - if v == '>': - v = self.max - elif v == '<': - v = self.min - try: - out = get_cronvalue(v, self.enum) - except ValueError: - raise ValueError("Unrecognised '%s'='%s'" % (self.name, v)) - except KeyError: - raise KeyError("No enumeration '%s' got '%s'" % (self.name, v)) - - if self.max == 6 and int(out) == 7: - raise SundayError("Detected Sunday as 7 instead of 0!") - - if int(out) < self.min or int(out) > self.max: - raise ValueError("Invalid value '%s', expected %d-%d for %s" % ( - str(v), self.min, self.max, self.name)) - return out - - def filter_v(self, v): - """Support wrapper for enumerations and check for range""" - return self._v(v) - - -def get_cronvalue(value, enums): - """Returns a value as int (pass-through) or a special enum value""" - if isinstance(value, int): - return value - elif str(value).isdigit(): - return 
int(str(value))
-    if not enums:
-        raise KeyError("No enumeration allowed")
-    return CronValue(str(value), enums)
-
-
-class CronValue(object):
-    """Represent a special value in the cron line"""
-    def __init__(self, value, enums):
-        self.enum = value
-        self.value = enums.index(value.lower())
-
-    def __lt__(self, value):
-        return self.value < int(value)
-
-    def __repr__(self):
-        return str(self)
-
-    def __str__(self):
-        return self.enum
-
-    def __int__(self):
-        return self.value
-
-
-def _render_values(values, sep=',', resolve=False):
-    """Returns a rendered list, sorted and optionally resolved"""
-    if len(values) > 1:
-        values.sort()
-    return sep.join([_render(val, resolve) for val in values])
-
-
-def _render(value, resolve=False):
-    """Return a single value rendered"""
-    if isinstance(value, CronRange):
-        return value.render(resolve)
-    if resolve:
-        return str(int(value))
-    return str(value)
-
-
-class CronRange(object):
-    """A range between one value and another for a time range."""
-    def __init__(self, vslice, *vrange):
-        self.slice = vslice
-        self.cron = None
-        self.seq = 1
-
-        if not vrange:
-            self.all()
-        elif isinstance(vrange[0], basestring):
-            self.parse(vrange[0])
-        elif isinstance(vrange[0], int) or isinstance(vrange[0], CronValue):
-            if len(vrange) == 2:
-                (self.vfrom, self.vto) = vrange
-            else:
-                self.seq = vrange[0]
-                self.all()
-
-    def parse(self, value):
-        """Parse a ranged value in a cronjob"""
-        if value.count('/') == 1:
-            value, seq = value.split('/')
-            self.seq = self.slice.filter_v(seq)
-            if self.seq < 1 or self.seq > self.slice.max - 1:
-                raise ValueError("Sequence can not be divided by zero or max")
-        if value.count('-') == 1:
-            vfrom, vto = value.split('-')
-            self.vfrom = self.slice.filter_v(vfrom)
-            try:
-                self.vto = self.slice.filter_v(vto)
-            except SundayError:
-                self.vto = 6
-        elif value == '*':
-            self.all()
-        else:
-            raise ValueError('Unknown cron range value "%s"' % value)
-
-    def all(self):
-        """Set this slice to all units between the miniumum and maximum"""
-        self.vfrom = self.slice.min
-        self.vto = self.slice.max
-
-    def render(self, resolve=False):
-        """Render the ranged value for a cronjob"""
-        value = '*'
-        if int(self.vfrom) > self.slice.min or int(self.vto) < self.slice.max:
-            value = _render_values([self.vfrom, self.vto], '-', resolve)
-        if self.seq != 1:
-            value += "/%d" % self.seq
-        if value != '*' and SYSTEMV:
-            value = ','.join(map(str, self.range()))
-        return value
-
-    def range(self):
-        """Returns the range of this cron slice as a iterable list"""
-        return range(int(self.vfrom), int(self.vto)+1, self.seq)
-
-    def every(self, value):
-        """Set the sequence value for this range."""
-        self.seq = int(value)
-
-    def __lt__(self, value):
-        return int(self.vfrom) < int(value)
-
-    def __gt__(self, value):
-        return int(self.vto) > int(value)
-
-    def __int__(self):
-        return int(self.vfrom)
-
-    def __str__(self):
-        return self.__unicode__()
-
-    def __unicode__(self):
-        return self.render()

From f9573e3a792cff7c4f73a697f1cbf6ea664f20f8 Mon Sep 17 00:00:00 2001
From: Michel Albert
Date: Fri, 20 Jan 2017 10:49:49 +0100
Subject: [PATCH 04/41] Moved vendor package into munininfluxdb

Should play nicely with virtualenvs (removes the risk of a name-clash).
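
A top-level package named "vendor" sitting directly on sys.path can
clash with any other installed distribution that ships a "vendor"
package; nesting it as munininfluxdb.vendor keeps the name private to
this project. The resulting fallback import pattern (a sketch of the
storable case changed below):

    try:
        import storable  # prefer a standalone installation when available
    except ImportError:
        # otherwise fall back to the copy bundled with munin-influxdb
        from munininfluxdb.vendor import storable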
--- munininfluxdb/commands/fetch.py | 2 +- munininfluxdb/munin.py | 2 +- {vendor => munininfluxdb/vendor}/__init__.py | 0 {vendor => munininfluxdb/vendor}/storable.py | 0 4 files changed, 2 insertions(+), 2 deletions(-) rename {vendor => munininfluxdb/vendor}/__init__.py (100%) rename {vendor => munininfluxdb/vendor}/storable.py (100%) diff --git a/munininfluxdb/commands/fetch.py b/munininfluxdb/commands/fetch.py index f6f06ae..89d18bd 100755 --- a/munininfluxdb/commands/fetch.py +++ b/munininfluxdb/commands/fetch.py @@ -19,7 +19,7 @@ try: import storable except ImportError: - from vendor import storable + from munininfluxdb.vendor import storable def pack_values(config, values): suffix = ":{0}".format(Defaults.DEFAULT_RRD_INDEX) diff --git a/munininfluxdb/munin.py b/munininfluxdb/munin.py index 36db6ec..376bf62 100644 --- a/munininfluxdb/munin.py +++ b/munininfluxdb/munin.py @@ -6,7 +6,7 @@ from utils import ProgressBar, Symbol from settings import Settings -from vendor import storable +from munininfluxdb.vendor import storable def discover_from_datafile(settings): """ diff --git a/vendor/__init__.py b/munininfluxdb/vendor/__init__.py similarity index 100% rename from vendor/__init__.py rename to munininfluxdb/vendor/__init__.py diff --git a/vendor/storable.py b/munininfluxdb/vendor/storable.py similarity index 100% rename from vendor/storable.py rename to munininfluxdb/vendor/storable.py From 2a3926010ef5dc939acd552f657c90e5bb8f1835 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Fri, 20 Jan 2017 10:53:09 +0100 Subject: [PATCH 05/41] Removed bash script, updated docs. --- README.md | 8 ++++++-- muninflux | 33 --------------------------------- munininfluxdb/commands/cron.py | 2 +- 3 files changed, 7 insertions(+), 36 deletions(-) delete mode 100755 muninflux diff --git a/README.md b/README.md index 3ebc2f9..7b9a074 100644 --- a/README.md +++ b/README.md @@ -57,10 +57,14 @@ Installation & Usage 3. Run ```import``` command: ``` - $ sudo ./muninflux import + $ sudo muninflux import ``` -4. A cron job will be automatically added after installation to refresh data from munin every 5 minutes (Munin default) +4. 
Install the CRON job to refresh data from munin every 5 minutes (Munin default) + + ``` + $ sudo muninflux cron install + ``` ### Some more details diff --git a/muninflux b/muninflux deleted file mode 100755 index cf3e472..0000000 --- a/muninflux +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -function usage() { - echo "usage:" $0 " []" - echo - echo "Available commands:" - echo " import Import data from an existing Munin setup to InfluxDB and (optionally) generate a Grafana dashboard" - echo " fetch Update values in InfluxDB based on the previous import" - echo " help Print this message" -} - -function launch_install_cron() { - python bin/fetch.py --install-cron $(dirname $(readlink -f "$0"))/bin/fetch.py -} - -if [[ $1 == "import" ]]; then - shift - python bin/import.py $@ && launch_install_cron -elif [[ $1 == "fetch" ]]; then - if [[ $2 == "--install-cron" ]]; then - launch_install_cron - else - shift - python bin/fetch.py $@ - fi -elif [[ $1 == "help" ]]; then - usage - exit 0 -else - usage - exit 1 -fi - diff --git a/munininfluxdb/commands/cron.py b/munininfluxdb/commands/cron.py index 735ddc5..3c6f2fe 100644 --- a/munininfluxdb/commands/cron.py +++ b/munininfluxdb/commands/cron.py @@ -23,7 +23,7 @@ def uninstall_cron(args): if os.geteuid() != 0: - print("It seems you are not root, please run \"muninflux fetch --uninstall-cron\" again with root privileges") + print("It seems you are not root, please run \"muninflux cron uninstall\" again with root privileges") sys.exit(1) cron = crontab.CronTab(user=CRON_USER) From 3f18fc21d8ba8218a6f3acc83650ced422f5744e Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 15 Mar 2017 07:37:06 +0100 Subject: [PATCH 06/41] WIP: separate CLI command to dump the RRD files. --- munininfluxdb/commands/dump.py | 72 +++++++++++++++++++++++++++++++ munininfluxdb/commands/import_.py | 32 -------------- munininfluxdb/main.py | 8 +++- 3 files changed, 79 insertions(+), 33 deletions(-) create mode 100644 munininfluxdb/commands/dump.py diff --git a/munininfluxdb/commands/dump.py b/munininfluxdb/commands/dump.py new file mode 100644 index 0000000..4f763d3 --- /dev/null +++ b/munininfluxdb/commands/dump.py @@ -0,0 +1,72 @@ +import logging + +from munininfluxdb import munin +from munininfluxdb import rrd +from munininfluxdb.settings import Settings, Defaults +from munininfluxdb.utils import Symbol + + +LOG = logging.getLogger(__name__) +NAME = 'dump' +DESCRIPTION = """ +The 'dump' command writes out the munin RRD files to XML. These XML files can +then be used by the 'load' command to import them into influxdb. 
+""" + + +def retrieve_munin_configuration(settings): + """ + """ + print("Exploring Munin structure") + + try: + settings = munin.discover_from_datafile(settings) + except Exception as e: + LOG.debug('Traceback:', exc_info=True) + print(" {0} Could not process datafile ({1}), will read www and RRD cache instead".format(Symbol.NOK_RED, settings.paths['datafile'])) + + # read /var/cache/munin/www to check what's currently displayed on the dashboard + settings = munin.discover_from_www(settings) + settings = rrd.discover_from_rrd(settings, insert_missing=False) + else: + print(" {0} Found {1}: extracted {2} measurement units".format(Symbol.OK_GREEN, settings.paths['datafile'], + settings.nb_fields)) + + # for each host, find the /var/lib/munin/ directory and check if node name and plugin conf match RRD files + try: + rrd.check_rrd_files(settings) + except Exception as e: + print(" {0} {1}".format(Symbol.NOK_RED, e)) + else: + print(" {0} Found {1} RRD files".format(Symbol.OK_GREEN, settings.nb_rrd_files)) + + return settings + + +def main(args): + settings = Settings(args) + settings = retrieve_munin_configuration(settings) + + # export RRD files as XML for (much) easier parsing (but takes much more time) + print("\nExporting RRD databases:".format(settings.nb_rrd_files)) + nb_xml = rrd.export_to_xml(settings) + print(" {0} Exported {1} RRD files to XML ({2})".format(Symbol.OK_GREEN, nb_xml, settings.paths['xml'])) + + +def setup(parser): + parser.add_argument('--xml-temp-path', default=Defaults.MUNIN_XML_FOLDER, + help='set path where to store result of RRD exported files (default: %(default)s)') + parser.add_argument('--keep-temp', action='store_true', + help='instruct to retain temporary files (mostly RRD\'s XML) after generation') + parser.add_argument('-v', '--verbose', type=int, default=1, + help='set verbosity level (0: quiet, 1: default, 2: debug)') + + # Munin + munargs = parser.add_argument_group('Munin parameters') + munargs.add_argument('--munin-path', default=Defaults.MUNIN_VAR_FOLDER, + help='path to main Munin folder (default: %(default)s)') + munargs.add_argument('--www', '--munin-www-path', default=Defaults.MUNIN_WWW_FOLDER, + help='path to main Munin folder (default: %(default)s)') + munargs.add_argument('--rrd', '--munin-rrd-path', default=Defaults.MUNIN_RRD_FOLDER, + help='path to main Munin folder (default: %(default)s)') + parser.set_defaults(func=main) diff --git a/munininfluxdb/commands/import_.py b/munininfluxdb/commands/import_.py index 1bd2c51..814a6ed 100755 --- a/munininfluxdb/commands/import_.py +++ b/munininfluxdb/commands/import_.py @@ -24,40 +24,11 @@ """ -def retrieve_munin_configuration(settings): - """ - """ - print("Exploring Munin structure") - - try: - settings = munin.discover_from_datafile(settings) - except Exception as e: - print(" {0} Could not process datafile ({1}), will read www and RRD cache instead".format(Symbol.NOK_RED, settings.paths['datafile'])) - - # read /var/cache/munin/www to check what's currently displayed on the dashboard - settings = munin.discover_from_www(settings) - settings = rrd.discover_from_rrd(settings, insert_missing=False) - else: - print(" {0} Found {1}: extracted {2} measurement units".format(Symbol.OK_GREEN, settings.paths['datafile'], - settings.nb_fields)) - - # for each host, find the /var/lib/munin/ directory and check if node name and plugin conf match RRD files - try: - rrd.check_rrd_files(settings) - except Exception as e: - print(" {0} {1}".format(Symbol.NOK_RED, e)) - else: - print(" {0} Found {1} RRD 
files".format(Symbol.OK_GREEN, settings.nb_rrd_files)) - - return settings - - def main(args): print("{0}Munin to InfluxDB migration tool{1}".format(Color.BOLD, Color.CLEAR)) print("-" * 20) settings = Settings(args) - settings = retrieve_munin_configuration(settings) # export RRD files as XML for (much) easier parsing (but takes much more time) print("\nExporting RRD databases:".format(settings.nb_rrd_files)) @@ -121,9 +92,6 @@ def main(args): def setup(parser): - parser.add_argument('--interactive', dest='interactive', action='store_true') - parser.add_argument('--no-interactive', dest='interactive', action='store_false') - parser.set_defaults(interactive=True) parser.add_argument('--xml-temp-path', default=Defaults.MUNIN_XML_FOLDER, help='set path where to store result of RRD exported files (default: %(default)s)') parser.add_argument('--keep-temp', action='store_true', diff --git a/munininfluxdb/main.py b/munininfluxdb/main.py index feab366..5932022 100644 --- a/munininfluxdb/main.py +++ b/munininfluxdb/main.py @@ -6,17 +6,22 @@ import munininfluxdb.commands.cron as cmd_cron import munininfluxdb.commands.fetch as cmd_fetch import munininfluxdb.commands.import_ as cmd_import +import munininfluxdb.commands.dump as cmd_dump def main(): parser = ArgumentParser(description='TODO') # TODO + parser.add_argument('--interactive', dest='interactive', action='store_true') + parser.add_argument('--no-interactive', dest='interactive', action='store_false') + parser.set_defaults(interactive=True) + subparsers = parser.add_subparsers( title='subcommands', description='valid subcommands', help='additional help' ) - for subcommand in (cmd_import, cmd_fetch, cmd_cron): + for subcommand in (cmd_import, cmd_fetch, cmd_cron, cmd_dump): subparser = subparsers.add_parser(subcommand.NAME, description=subcommand.DESCRIPTION) subcommand.setup(subparser) @@ -28,5 +33,6 @@ def main(): print("\n{0} Canceled.".format(Symbol.NOK_RED)) sys.exit(1) except Exception as e: + raise print("{0} Error: {1}".format(Symbol.NOK_RED, e)) sys.exit(1) From e3a0a22c032a67913561bacf5899bcf6b7782b68 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 15 Mar 2017 08:05:45 +0100 Subject: [PATCH 07/41] vendor package no longer required. --- munininfluxdb/vendor/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 munininfluxdb/vendor/__init__.py diff --git a/munininfluxdb/vendor/__init__.py b/munininfluxdb/vendor/__init__.py deleted file mode 100644 index e69de29..0000000 From 73e920a286ef481dc4e953a9d4cd9901a4480639 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 15 Mar 2017 07:56:13 +0100 Subject: [PATCH 08/41] Upgraded to latest storable version (Py3) --- munininfluxdb/commands/fetch.py | 5 +- munininfluxdb/munin.py | 3 +- munininfluxdb/vendor/storable.py | 357 ------------------------------- setup.py | 5 +- 4 files changed, 6 insertions(+), 364 deletions(-) delete mode 100644 munininfluxdb/vendor/storable.py diff --git a/munininfluxdb/commands/fetch.py b/munininfluxdb/commands/fetch.py index 89d18bd..86e3afd 100755 --- a/munininfluxdb/commands/fetch.py +++ b/munininfluxdb/commands/fetch.py @@ -9,6 +9,7 @@ from munininfluxdb.settings import Defaults import influxdb +import storable NAME = 'fetch' DESCRIPTION = """'fetch' command grabs fresh data gathered by a still running Munin installation and send it to InfluxDB. @@ -16,10 +17,6 @@ Currently, Munin needs to be still running to update the data in '/var/lib/munin/state-*' files. 
""" -try: - import storable -except ImportError: - from munininfluxdb.vendor import storable def pack_values(config, values): suffix = ":{0}".format(Defaults.DEFAULT_RRD_INDEX) diff --git a/munininfluxdb/munin.py b/munininfluxdb/munin.py index 376bf62..4b6514f 100644 --- a/munininfluxdb/munin.py +++ b/munininfluxdb/munin.py @@ -6,7 +6,8 @@ from utils import ProgressBar, Symbol from settings import Settings -from munininfluxdb.vendor import storable +import storable + def discover_from_datafile(settings): """ diff --git a/munininfluxdb/vendor/storable.py b/munininfluxdb/vendor/storable.py deleted file mode 100644 index bd24984..0000000 --- a/munininfluxdb/vendor/storable.py +++ /dev/null @@ -1,357 +0,0 @@ - -# -# License -# -# python storable is distributed under the zlib/libpng license, which is OSS -# (Open Source Software) compliant. -# -# Copyright (C) 2009 Tim Aerts -# -# This software is provided 'as-is', without any express or implied -# warranty. In no event will the authors be held liable for any damages -# arising from the use of this software. -# -# Permission is granted to anyone to use this software for any purpose, -# including commercial applications, and to alter it and redistribute it -# freely, subject to the following restrictions: -# -# 1. The origin of this software must not be misrepresented; you must not -# claim that you wrote the original software. If you use this software -# in a product, an acknowledgment in the product documentation would be -# appreciated but is not required. -# 2. Altered source versions must be plainly marked as such, and must not be -# misrepresented as being the original software. -# 3. This notice may not be removed or altered from any source distribution. -# -# Tim Aerts -# - -from struct import unpack -import cStringIO - -def _read_size(fh, cache): - return unpack(cache['size_unpack_fmt'], fh.read(4))[0] - -def SX_OBJECT(fh, cache): - # idx's are always big-endian dumped by storable's freeze/nfreeze I think - i = unpack('>I', fh.read(4))[0] - cache['has_sx_object'] = True - return (0, i) - -def SX_LSCALAR(fh, cache): - return fh.read(_read_size(fh, cache)) - -def SX_LUTF8STR(fh, cache): - return SX_LSCALAR(fh, cache) - -def SX_ARRAY(fh, cache): - data = [] - for i in range(0,_read_size(fh, cache)): - data.append(process_item(fh, cache)) - - return data - -def SX_HASH(fh, cache): - data = {} - for i in range(0,_read_size(fh, cache)): - value = process_item(fh, cache) - key = fh.read(_read_size(fh, cache)) - data[key] = value - - return data - -def SX_REF(fh, cache): - return process_item(fh, cache) - -def SX_UNDEF(fh, cache): - return None - -def SX_INTEGER(fh, cache): - return unpack(cache['int_unpack_fmt'], fh.read(8))[0] - -def SX_DOUBLE(fh, cache): - return unpack(cache['double_unpack_fmt'], fh.read(8))[0] - -def SX_BYTE(fh, cache): - return unpack('B', fh.read(1))[0] - 128 - -def SX_NETINT(fh, cache): - return unpack('>I', fh.read(4))[0] - -def SX_SCALAR(fh, cache): - size = unpack('B', fh.read(1))[0] - return fh.read(size) - -def SX_UTF8STR(fh, cache): - return SX_SCALAR(fh, cache) - -def SX_TIED_ARRAY(fh, cache): - return process_item(fh, cache) - -def SX_TIED_HASH(fh, cache): - return SX_TIED_ARRAY(fh, cache) - -def SX_TIED_SCALAR(fh, cache): - return SX_TIED_ARRAY(fh, cache) - -def SX_SV_UNDEF(fh, cache): - return None - -def SX_BLESS(fh, cache): - size = unpack('B', fh.read(1))[0] - package_name = fh.read(size) - cache['classes'].append(package_name) - return process_item(fh, cache) - -def SX_IX_BLESS(fh, cache): - indx = 
unpack('B', fh.read(1))[0] - package_name = cache['classes'][indx] - return process_item(fh, cache) - -def SX_OVERLOAD(fh, cache): - return process_item(fh, cache) - -def SX_TIED_KEY(fh, cache): - data = process_item(fh, cache) - key = process_item(fh, cache) - return data - -def SX_TIED_IDX(fh, cache): - data = process_item(fh, cache) - # idx's are always big-endian dumped by storable's freeze/nfreeze I think - indx_in_array = unpack('>I', fh.read(4))[0] - return data - -def SX_HOOK(fh, cache): - flags = unpack('B', fh.read(1))[0] - - while flags & int(0x40): # SHF_NEED_RECURSE - #print("SHF_NEED_RECURSE") - dummy = process_item(fh, cache) - #print(dummy) - flags = unpack('B', fh.read(1))[0] - #print("flags:"+str(flags)) - - #print("recursive done") - - if flags & int(0x20): # SHF_IDX_CLASSNAME - #print("SHF_IDX_CLASSNAME") - #print("where:"+str(fh.tell())) - if flags & int(0x04): # SHF_LARGE_CLASSLEN - #print("SHF_LARGE_CLASSLEN") - # TODO: test - indx = unpack('>I', fh.read(4))[0] - else: - indx = unpack('B', fh.read(1))[0] - #print("classindx:"+str(indx)) - package_name = cache['classes'][indx] - else: - #print("where:"+str(fh.tell())) - if flags & int(0x04): # SHF_LARGE_CLASSLEN - #print("SHF_LARGE_CLASSLEN") - # TODO: test - # FIXME: is this actually possible? - class_size = _read_size(fh, cache) - else: - class_size = unpack('B', fh.read(1))[0] - #print("size:"+str(class_size)) - - package_name = fh.read(class_size) - cache['classes'].append(package_name) - #print("size:"+str(class_size)+",package:"+str(package_name)) - - arguments = {} - - str_size = 0 - if flags & int(0x08): # SHF_LARGE_STRLEN - #print("SHF_LARGE_STRLEN") - str_size = _read_size(fh, cache) - else: - #print("where:"+str(fh.tell())) - str_size = unpack('B', fh.read(1))[0] - - if str_size: - frozen_str = fh.read(str_size) - #print("size:"+str(str_size)+",frozen_str:"+str(frozen_str)) - arguments[0] = frozen_str - - list_size = 0 - if flags & int(0x80): # SHF_HAS_LIST - #print("SHF_HAS_LIST") - if flags & int(0x10): # SHF_LARGE_LISTLEN - #print("SHF_LARGE_LISTLEN") - #print("where:"+str(fh.tell())) - list_size = _read_size(fh, cache) - else: - list_size = unpack('B', fh.read(1))[0] - - - #print("list_size:"+str(list_size)) - for i in range(0,list_size): - indx_in_array = unpack('>I', fh.read(4))[0] - #print("indx:"+str(indx_in_array)) - if indx_in_array in cache['objects']: - arguments[i+1] = cache['objects'][indx_in_array] - else: - arguments[i+1] = None - - # FIXME: implement the real callback STORABLE_thaw() still, for now, just - # return the dictionary 'arguments' as data - type = flags & int(0x03) # SHF_TYPE_MASK 0x03 - #print("flags:"+str(type)) - data = arguments - if type == 3: # SHT_EXTRA - # TODO - #print("SHT_EXTRA") - pass - if type == 0: # SHT_SCALAR - # TODO - #print("SHT_SCALAR") - pass - if type == 1: # SHT_ARRAY - # TODO - #print("SHT_ARRAY") - pass - if type == 2: # SHT_HASH - # TODO - #print("SHT_HASH") - pass - - - return data - -def SX_FLAG_HASH(fh, cache): - # TODO: NOT YET IMPLEMENTED!!!!!! 
- #print("SX_FLAG_HASH:where:"+str(fh.tell())) - flags = unpack('B', fh.read(1))[0] - size = _read_size(fh, cache) - #print("size:"+str(size)) - #print("flags:"+str(flags)) - data = {} - for i in range(0,size): - value = process_item(fh, cache) - flags = unpack('B', fh.read(1))[0] - keysize = _read_size(fh, cache) - key = None - if keysize: - key = fh.read(keysize) - data[key] = value - - return data - -# *AFTER* all the subroutines -engine = { - '\x00': SX_OBJECT, # ( 0): Already stored object - '\x01': SX_LSCALAR, # ( 1): Scalar (large binary) follows (length, data) - '\x02': SX_ARRAY, # ( 2): Array forthcoming (size, item list) - '\x03': SX_HASH, # ( 3): Hash forthcoming (size, key/value pair list) - '\x04': SX_REF, # ( 4): Reference to object forthcoming - '\x05': SX_UNDEF, # ( 5): Undefined scalar - '\x06': SX_INTEGER, # ( 6): Undefined scalar - '\x07': SX_DOUBLE, # ( 7): Double forthcoming - '\x08': SX_BYTE, # ( 8): (signed) byte forthcoming - '\x09': SX_NETINT, # ( 9): Integer in network order forthcoming - '\x0a': SX_SCALAR, # (10): Scalar (binary, small) follows (length, data) - '\x0b': SX_TIED_ARRAY, # (11): Tied array forthcoming - '\x0c': SX_TIED_HASH, # (12): Tied hash forthcoming - '\x0d': SX_TIED_SCALAR, # (13): Tied scalar forthcoming - '\x0e': SX_SV_UNDEF, # (14): Perl's immortal PL_sv_undef - '\x11': SX_BLESS, # (17): Object is blessed - '\x12': SX_IX_BLESS, # (18): Object is blessed, classname given by index - '\x13': SX_HOOK, # (19): Stored via hook, user-defined - '\x14': SX_OVERLOAD, # (20): Overloaded reference - '\x15': SX_TIED_KEY, # (21): Tied magic key forthcoming - '\x16': SX_TIED_IDX, # (22): Tied magic index forthcoming - '\x17': SX_UTF8STR, # (23): UTF-8 string forthcoming (small) - '\x18': SX_LUTF8STR, # (24): UTF-8 string forthcoming (large) - '\x19': SX_FLAG_HASH, # (25): Hash with flags forthcoming (size, flags, key/flags/value triplet list) -} - -exclude_for_cache = dict({ - '\x00':True, '\x0b':True, '\x0c':True, '\x0d':True, '\x11':True, '\x12':True -}) - -def handle_sx_object_refs(cache, data): - iterateelements = None - if type(data) is list: - iterateelements = enumerate(iter(data)) - elif type(data) is dict: - iterateelements = data.iteritems() - else: - return - - for k,item in iterateelements: - if type(item) is list or type(item) is dict: - handle_sx_object_refs(cache, item) - if type(item) is tuple: - data[k] = cache['objects'][item[1]] - return data - -def process_item(fh, cache): - magic_type = fh.read(1) - #print('magic:'+str(unpack('B',magic_type)[0])+",where:"+str(fh.tell())+',will do:'+str(engine[magic_type])) - if magic_type not in exclude_for_cache: - i = cache['objectnr'] - cache['objectnr'] = cache['objectnr']+1 - #print("set i:"+str(i)) - cache['objects'][i] = engine[magic_type](fh, cache) - #print("set i:"+str(i)+",to:"+str(cache['objects'][i])) - return cache['objects'][i] - else: - return engine[magic_type](fh, cache) - -def thaw(frozen_data): - fh = cStringIO.StringIO(frozen_data) - data = deserialize(fh); - fh.close(); - return data - -def retrieve(file): - fh = open(file, 'rb') - ignore = fh.read(4) - data = None - if ignore == 'pst0': - data = deserialize(fh) - fh.close() - return data - -def deserialize(fh): - magic = fh.read(1) - byteorder = '>' - if magic == '\x05': - version = fh.read(1) - #print("OK:nfreeze") - #pass - if magic == '\x04': - version = fh.read(1) - size = unpack('B', fh.read(1))[0] - archsize = fh.read(size) - #print("OK:freeze:" + str(byteorder)) - - # 32-bit ppc: 4321 - # 32-bit x86: 1234 - # 64-bit 
x86_64: 12345678 - - if archsize == '1234' or archsize == '12345678': - byteorder = '<' - else: - byteorder = '>' - - somethingtobeinvestigated = fh.read(4) - - #print('version:'+str(unpack('B', version)[0])); - cache = { - 'objects' : {}, - 'objectnr' : 0, - 'classes' : [], - 'has_sx_object' : False, - 'size_unpack_fmt' : byteorder + 'I', - 'int_unpack_fmt' : byteorder + 'Q', - 'double_unpack_fmt' : byteorder + 'd' - } - data = process_item(fh, cache) - - if cache['has_sx_object']: - handle_sx_object_refs(cache, data) - - return data diff --git a/setup.py b/setup.py index e31a483..3f92cbf 100644 --- a/setup.py +++ b/setup.py @@ -17,9 +17,10 @@ ] }, install_requires=[ - 'influxdb>=2.12.0', + 'influxdb >= 2.12.0', + 'python-crontab>=2.1.1', 'requests', - 'python-crontab>=2.1.1' + 'storable >= 1.0.0', ], packages=find_packages(), classifiers=[ From acea4515fbee97cb95a400e0fa8b5e1239299871 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Thu, 16 Mar 2017 08:27:40 +0100 Subject: [PATCH 09/41] Added egg-info to gitignore --- .gitignore | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 13c212b..582e624 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,9 @@ *.pyc *~ -venv/ -munininfluxdb/test* .idea/ -local/ +/*.egg-info data/ lib/ +local/ +munininfluxdb/test* +venv/ From 85fe31a1182d8326cbb29a7bdadc5f99efbf56cd Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Mon, 20 Mar 2017 08:20:45 +0100 Subject: [PATCH 10/41] Moved "cron" code into an external adapter & tests. Having the CRON commands in a separate module reduces the "contact area" between our application and the external dependency. This makes both testing and replacing the dependency easier in the future. --- munininfluxdb/commands/cron.py | 132 +++++++++++++++++++---------- munininfluxdb/commands/dump.py | 10 ++- munininfluxdb/commands/fetch.py | 10 ++- munininfluxdb/commands/import_.py | 10 ++- munininfluxdb/external/__init__.py | 0 munininfluxdb/external/cron.py | 42 +++++++++ munininfluxdb/main.py | 7 +- 7 files changed, 159 insertions(+), 52 deletions(-) create mode 100644 munininfluxdb/external/__init__.py create mode 100644 munininfluxdb/external/cron.py diff --git a/munininfluxdb/commands/cron.py b/munininfluxdb/commands/cron.py index 3c6f2fe..65b7c01 100644 --- a/munininfluxdb/commands/cron.py +++ b/munininfluxdb/commands/cron.py @@ -3,62 +3,98 @@ import pwd import sys -import crontab - from munininfluxdb.utils import Symbol, absolute_executable -try: - pwd.getpwnam('munin') -except KeyError: - CRON_USER = 'root' -else: - CRON_USER = 'munin' - # Cron job comment is used to uninstall and must not be manually deleted from the crontab CRON_COMMENT = 'Update InfluxDB with fresh values from Munin' NAME = 'cron' DESCRIPTION = 'Installs or uninstalls the CRON job' -def uninstall_cron(args): - if os.geteuid() != 0: - print("It seems you are not root, please run \"muninflux cron uninstall\" again with root privileges") - sys.exit(1) - - cron = crontab.CronTab(user=CRON_USER) - jobs = list(cron.find_comment(CRON_COMMENT)) - cron.remove(*jobs) - cron.write() - - nb = len(jobs) - if nb: - print("{0} Cron job uninstalled for user {1} ({2} entries deleted)".format(Symbol.OK_GREEN, CRON_USER, nb)) +def get_cron_user(): + try: + pwd.getpwnam('munin') + except KeyError: + output = 'root' else: - print("No matching job found (searching comment \"{1}\" in crontab for user {2})".format(Symbol.WARN_YELLOW, - CRON_COMMENT, CRON_USER)) - - -def install_cron(args): - script_path = 
absolute_executable()
-    cmd = '%s fetch' % script_path
+        output = 'munin'
+    return output
+
+
+def uninstall_cron(cron_adapter):
+    """
+    Creates a function which uses *cron_adapter* to remove an entry from the
+    CRONtab.
+
+    See :py:mod:`munininfluxdb.external.cron` for an example of an adapter.
+    """
+    def fun(args):
+        """
+        Main function for the "cron uninstall" command.
+
+        :param args: The result from parsing CLI arguments.
+        """
+        if os.geteuid() != 0:
+            print("It seems you are not root, please run \"muninflux cron uninstall\" again with root privileges")
+            sys.exit(1)
+
+        user = args.user or get_cron_user()
+        nb = cron_adapter.remove_by_comment(user, CRON_COMMENT)
+
+        if nb:
+            print("{0} Cron job uninstalled for user {1} ({2} entries deleted)".format(Symbol.OK_GREEN, user, nb))
+        else:
+            print("{0} No matching job found (searching comment \"{1}\" in crontab for user {2})".format(Symbol.WARN_YELLOW,
+                                                                                                          CRON_COMMENT, user))
+    return fun
+
+
+def install_cron(cron_adapter):
+    """
+    Creates a function which uses *cron_adapter* to add an entry to the CRONtab.
+
+    See :py:mod:`munininfluxdb.external.cron` for an example of an adapter.
+    """
+    def fun(args):
+        """
+        Main function for the "cron install" command.
+
+        :param args: The result from parsing CLI arguments.
+        :return: Whether the operation was successful or not.
+        :rtype: bool
+        """
+        script_path = absolute_executable()
+        cmd = '%s fetch' % script_path
+
+        if os.geteuid() != 0:
+            print("It seems you are not root, please run \"muninflux cron install\" again with root privileges")
+            sys.exit(1)
+
+        user = args.user or get_cron_user()
+        success = cron_adapter.add_with_comment(user, cmd, args.period, CRON_COMMENT)
+
+        print("{0} Cron job installed for user {1}".format(Symbol.OK_GREEN, user))
+        return success
+    return fun
+
+
+def setup(parser, injections):
+    """
+    Sets up CLI argument parsing.
+
+    The argument *injections* should be a dictionary containing a key 'cron'
+    mapping to a cron adapter. For an example cron adapter see
+    ``munininfluxdb/external/cron.py``.
+
+    :param parser: The argument parser for this subcommand.
+    :param injections: A dictionary containing the key ``'cron'`` mapping to an
+        implementation of a CRON adapter. See
+        :py:mod:`munininfluxdb.external.cron` for an example.
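+
+    A minimal sketch of the expected wiring (mirroring what ``main.py`` does
+    further down in this patch)::
+
+        import munininfluxdb.external.cron as cron
+        setup(parser, {'cron': cron})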
+ """ + parser.add_argument('-u', '--user', default='', metavar='USER', + help='The CRON user') - if os.geteuid() != 0: - print("It seems you are not root, please run \"%s cron install\" again with root privileges") - sys.exit(1) - - cron = crontab.CronTab(user=CRON_USER) - job = cron.new(command=cmd, user=CRON_USER, comment=CRON_COMMENT) - job.minute.every(args.period) - - if job.is_valid() and job.is_enabled(): - cron.write() - - print("{0} Cron job installed for user {1}".format(Symbol.OK_GREEN, CRON_USER)) - return job.is_valid() and job.is_enabled() - - -def setup(parser): subparsers = parser.add_subparsers(title='CRON commands') install_parser = subparsers.add_parser( 'install', description='Installs the CRON job') @@ -68,5 +104,7 @@ def setup(parser): install_parser.add_argument( '-p', '--period', default=5, type=int, help="sets the period in minutes between each fetch in the cron job (default: %(default)dmin)") - install_parser.set_defaults(func=install_cron) - uninstall_parser.set_defaults(func=uninstall_cron) + + cron_adapter = injections['cron'] + install_parser.set_defaults(func=install_cron(cron_adapter)) + uninstall_parser.set_defaults(func=uninstall_cron(cron_adapter)) diff --git a/munininfluxdb/commands/dump.py b/munininfluxdb/commands/dump.py index 4f763d3..b352550 100644 --- a/munininfluxdb/commands/dump.py +++ b/munininfluxdb/commands/dump.py @@ -53,7 +53,15 @@ def main(args): print(" {0} Exported {1} RRD files to XML ({2})".format(Symbol.OK_GREEN, nb_xml, settings.paths['xml'])) -def setup(parser): +def setup(parser, injections): + """ + Sets up CLI argument parsing. + + The argument *injections* is currently unused in this command and is a + placeholder for the future. + + :param parser: The argument parser for this subcommand. + """ parser.add_argument('--xml-temp-path', default=Defaults.MUNIN_XML_FOLDER, help='set path where to store result of RRD exported files (default: %(default)s)') parser.add_argument('--keep-temp', action='store_true', diff --git a/munininfluxdb/commands/fetch.py b/munininfluxdb/commands/fetch.py index 86e3afd..27e4c3b 100755 --- a/munininfluxdb/commands/fetch.py +++ b/munininfluxdb/commands/fetch.py @@ -107,7 +107,15 @@ def main(args): print("{0} Updated configuration: {1}".format(Symbol.OK_GREEN, f.name)) -def setup(parser): +def setup(parser, injections): + """ + Sets up CLI argument parsing. + + The argument *injections* is currently unused in this command and is a + placeholder for the future. + + :param parser: The argument parser for this subcommand. + """ parser.add_argument('--config', default=Defaults.FETCH_CONFIG, help='overrides the default configuration file (default: %(default)s)') parser.set_defaults(func=main) diff --git a/munininfluxdb/commands/import_.py b/munininfluxdb/commands/import_.py index 814a6ed..06ceade 100755 --- a/munininfluxdb/commands/import_.py +++ b/munininfluxdb/commands/import_.py @@ -91,7 +91,15 @@ def main(args): print("Then we're good! Have a nice day!") -def setup(parser): +def setup(parser, injections): + """ + Sets up CLI argument parsing. + + The argument *injections* is currently unused in this command and is a + placeholder for the future. + + :param parser: The argument parser for this subcommand. 
+ """ parser.add_argument('--xml-temp-path', default=Defaults.MUNIN_XML_FOLDER, help='set path where to store result of RRD exported files (default: %(default)s)') parser.add_argument('--keep-temp', action='store_true', diff --git a/munininfluxdb/external/__init__.py b/munininfluxdb/external/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/munininfluxdb/external/cron.py b/munininfluxdb/external/cron.py new file mode 100644 index 0000000..017e2da --- /dev/null +++ b/munininfluxdb/external/cron.py @@ -0,0 +1,42 @@ +""" +Default CRON implementation. + +This module represents a "border" to the outside world and provides a "seam" for +dependency injection/patching/mocking. Or easier replacement for the underlying +CRON library. +""" +import crontab + + +def remove_by_comment(user, comment): + """ + Searches for CRON entries of *user* containing *comment* as commment. Each + entry with that comment is removed from the CRONtab. + + :return: The number of deleted jobs. + :rtype: int + """ + cron = crontab.CronTab(user=user) + jobs = list(cron.find_comment(comment)) + cron.remove(*jobs) + cron.write() + return len(jobs) + + +def add_with_comment(user, cmd, period, comment): + """ + Adds a new entry running *cmd* for *uer* to the CRONtab. The entry will be + scheduled to run each *period* minutes. For identification (and removal by + the ``uninstall`` command, the entry is marked with the comment in + *comment*. + + :return: Whether the operation was successful or not. + :rtype: bool + """ + cron = crontab.CronTab(user=user) + job = cron.new(command=cmd, user=user, comment=comment) + job.minute.every(period) + success = job.is_valid() and job.is_enabled() + if success: + cron.write() + return success diff --git a/munininfluxdb/main.py b/munininfluxdb/main.py index 5932022..53c428a 100644 --- a/munininfluxdb/main.py +++ b/munininfluxdb/main.py @@ -4,9 +4,10 @@ from munininfluxdb.utils import Symbol import munininfluxdb.commands.cron as cmd_cron +import munininfluxdb.commands.dump as cmd_dump import munininfluxdb.commands.fetch as cmd_fetch import munininfluxdb.commands.import_ as cmd_import -import munininfluxdb.commands.dump as cmd_dump +import munininfluxdb.external.cron as cron def main(): @@ -24,7 +25,9 @@ def main(): for subcommand in (cmd_import, cmd_fetch, cmd_cron, cmd_dump): subparser = subparsers.add_parser(subcommand.NAME, description=subcommand.DESCRIPTION) - subcommand.setup(subparser) + subcommand.setup(subparser, { + 'cron': cron + }) args = parser.parse_args() try: From eb24d43326a4ed45613d09299282e22c8801ce07 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Tue, 21 Mar 2017 08:16:14 +0100 Subject: [PATCH 11/41] Extracted logic for parsing datafile lines. --- munininfluxdb/munin.py | 48 ++++++++++++++++++++++++++++++------------ 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/munininfluxdb/munin.py b/munininfluxdb/munin.py index 4b6514f..ef7ca3b 100644 --- a/munininfluxdb/munin.py +++ b/munininfluxdb/munin.py @@ -1,4 +1,5 @@ from __future__ import print_function +from collections import namedtuple import os import sys import pprint @@ -8,6 +9,35 @@ import storable +DataFileLine = namedtuple( + 'DataFileLine', + 'plugin value domain host field property') + + +def parse_datafile_line(line): + """ + Takes a line from a munin datafile and parses out the different values. + + Returns DataFileLine object. 
+ """ + # header line + if line.startswith("version"): + return None + else: + line = line.strip() + + # ex: acadis.org;tesla:memory.swap.label swap + domain, tail = line.split(";", 1) + host, tail = tail.split(":", 1) + head, value = tail.split(" ", 1) + plugin_parts = head.rsplit(".", 2) + if len(plugin_parts) == 3: + plugin, field, property = plugin_parts + else: + return None + + return DataFileLine(plugin, value, domain, host, field, property) + def discover_from_datafile(settings): """ @@ -23,22 +53,12 @@ def discover_from_datafile(settings): line_number = line_number + 1 # We count lines starting at 1. Not 0 # header line - if line.startswith("version"): + parse_result = parse_datafile_line(line) + if not parse_result: continue else: - line = line.strip() - - # ex: acadis.org;tesla:memory.swap.label swap - domain, tail = line.split(";", 1) - host, tail = tail.split(":", 1) - head, value = tail.split(" ", 1) - plugin_parts = head.rsplit(".", 2) - if len(plugin_parts) == 3: - plugin, field, property = plugin_parts - else: - # TODO LOG.debug('Line #%d is an invalid plugin line. Skipping' % - # TODO line_number) - continue + plugin, value, domain, host, field, property = parse_result + # plugin name kept to allow running the plugin in fetch command plugin_name = plugin From 584baad11b4e9bad9de9829b607393ae2b740797 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 22 Mar 2017 21:48:52 +0100 Subject: [PATCH 12/41] Refactored datafile parsing for testing. --- munininfluxdb/munin.py | 62 ++++++++++++++++++++++++--------------- munininfluxdb/settings.py | 1 + 2 files changed, 40 insertions(+), 23 deletions(-) diff --git a/munininfluxdb/munin.py b/munininfluxdb/munin.py index ef7ca3b..858819f 100644 --- a/munininfluxdb/munin.py +++ b/munininfluxdb/munin.py @@ -18,6 +18,10 @@ def parse_datafile_line(line): """ Takes a line from a munin datafile and parses out the different values. + Lines are parsed as:: + + ;:.. value + Returns DataFileLine object. """ # header line @@ -39,6 +43,40 @@ def parse_datafile_line(line): return DataFileLine(plugin, value, domain, host, field, property) +def populate_settings(settings, datafile): + """ + Upgrades a settings object with values from a munin datafile. + + WARNING: THe *settings* object will be modified in-place! + + :param settings: The settings object to upgrade + :param datafile: The file-object to use for reading values from. + """ + for line_number, line in enumerate(datafile.readlines()): + line_number = line_number + 1 # We count lines starting at 1. Not 0 + + # header line + parse_result = parse_datafile_line(line) + if not parse_result: + continue + else: + plugin, value, domain, host, field, property = parse_result + + # plugin name kept to allow running the plugin in fetch command + plugin_name = plugin + + # if plugin.startswith("diskstats"): + # print(head, plugin_parts, len(plugin_parts), value) + + if len(plugin.strip()) == 0: + # plugin properties + settings.domains[domain].hosts[host].plugins[field].settings[property] = value + settings.domains[domain].hosts[host].plugins[field].original_name = plugin_name + else: + # field properties + settings.domains[domain].hosts[host].plugins[plugin].fields[field].settings[property] = value + + def discover_from_datafile(settings): """ /var/lib/munin/htmlconf.storable contains a copy of all informations required to build the graph (limits, legend, types...) 
@@ -49,29 +87,7 @@ def discover_from_datafile(settings): """ with open(settings.paths['datafile']) as f: - for line_number, line in enumerate(f.readlines()): - line_number = line_number + 1 # We count lines starting at 1. Not 0 - - # header line - parse_result = parse_datafile_line(line) - if not parse_result: - continue - else: - plugin, value, domain, host, field, property = parse_result - - # plugin name kept to allow running the plugin in fetch command - plugin_name = plugin - - # if plugin.startswith("diskstats"): - # print(head, plugin_parts, len(plugin_parts), value) - - if len(plugin.strip()) == 0: - # plugin properties - settings.domains[domain].hosts[host].plugins[field].settings[property] = value - settings.domains[domain].hosts[host].plugins[field].original_name = plugin_name - else: - # field properties - settings.domains[domain].hosts[host].plugins[plugin].fields[field].settings[property] = value + populate_settings(settings, f) # post parsing for domain, host, plugin, field in settings.iter_fields(): diff --git a/munininfluxdb/settings.py b/munininfluxdb/settings.py index 4e0e5a0..b03431a 100644 --- a/munininfluxdb/settings.py +++ b/munininfluxdb/settings.py @@ -124,6 +124,7 @@ def __init__(self, cli_args=None): self.nb_fields = 0 self.nb_rrd_files = 0 + def save_fetch_config(self): config = { "influxdb": self.influxdb, From 99d69c1fe541644bc38a93b0dd88dac07d9cf53c Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 22 Mar 2017 21:49:25 +0100 Subject: [PATCH 13/41] Added some simple tests. --- .gitignore | 1 - munininfluxdb/test/__init__.py | 0 munininfluxdb/test/test_externals.py | 62 ++++++++++++++++++++ munininfluxdb/test/test_munin.py | 84 ++++++++++++++++++++++++++++ 4 files changed, 146 insertions(+), 1 deletion(-) create mode 100644 munininfluxdb/test/__init__.py create mode 100644 munininfluxdb/test/test_externals.py create mode 100644 munininfluxdb/test/test_munin.py diff --git a/.gitignore b/.gitignore index 582e624..dbb6c1b 100644 --- a/.gitignore +++ b/.gitignore @@ -5,5 +5,4 @@ data/ lib/ local/ -munininfluxdb/test* venv/ diff --git a/munininfluxdb/test/__init__.py b/munininfluxdb/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/munininfluxdb/test/test_externals.py b/munininfluxdb/test/test_externals.py new file mode 100644 index 0000000..0334f07 --- /dev/null +++ b/munininfluxdb/test/test_externals.py @@ -0,0 +1,62 @@ +""" +Tests for external dependencies/external systems. + +This module makes use of mock to "simulate" the external calls. If "mock" is +unavailable, the tests are skipped. +""" +import unittest + +try: + from unittest.mock import patch, call, MagicMock + has_mock = True +except ImportError: + try: + from mock import patch, call, MagicMock + has_mock = True + except ImportError: + has_mock = False + + +@unittest.skipUnless(has_mock, "unittest.mock is not available.") +class TestCron(unittest.TestCase): + """ + Test Case for CRON commands. 
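+
+    ``munininfluxdb.external.cron.crontab`` is replaced with a mock in every
+    test, so no real crontab is read or written.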
+ """ + + def test_install(self): + from munininfluxdb.external import cron + with patch('munininfluxdb.external.cron.crontab') as ptch: + cron_instance = MagicMock() + ptch.CronTab.return_value = cron_instance + + cmd_instance = MagicMock() + cmd_instance.is_valid.return_value = True + cmd_instance.is_enabled.return_value = True + cron_instance.new.return_value = cmd_instance + + result = cron.add_with_comment('munin', 'foo bar', 10, 'hoi') + ptch.CronTab.assert_called_with(user='munin') + cron_instance.new.assert_called_with( + command='foo bar', + comment='hoi', + user='munin') + + cmd_instance.minute.every.assert_called_with(10) + self.assertTrue(result) + + def test_uninstall(self): + from munininfluxdb.external import cron + with patch('munininfluxdb.external.cron.crontab') as ptch: + cron_instance = MagicMock() + ptch.CronTab.return_value = cron_instance + + cron_instance.find_comment.return_value = [1, 2, 3] + + result = cron.remove_by_comment('munin', 'hoi') + + ptch.CronTab.assert_called_with(user='munin') + cron_instance.find_comment.assert_called_with('hoi') + cron_instance.remove.assert_called_with(1, 2, 3) + cron_instance.write.assert_called_with() + + self.assertEqual(result, 3) diff --git a/munininfluxdb/test/test_munin.py b/munininfluxdb/test/test_munin.py new file mode 100644 index 0000000..280baad --- /dev/null +++ b/munininfluxdb/test/test_munin.py @@ -0,0 +1,84 @@ +from io import StringIO +from textwrap import dedent +import unittest + +from munininfluxdb.munin import populate_settings +from munininfluxdb.settings import Settings + + +class TestDataFileHandling(unittest.TestCase): + + def test_populate_settings(self): + data = StringIO(dedent( + u'''\ + version 2.0.19-3 + group1;top.level.domain:postgres_locks_dbname.accesssharelock.type GAUGE + group1;top.level.domain:postgres_connections_db.template1.graph_data_size normal + group1;top.level.domain:cpu.system.info CPU time spent by the kernel in system activities + group1;top.level.domain:cpu.irq.graph_data_size normal + group1;top.level.domain:apache_volume.volume80.label port 80 + group1;top.level.domain:df.graph_vlabel % + group1;top.level.domain:df.graph_title Disk usage in percent + group1;top.level.domain:apache_volume.volume80.type DERIVE + group2;mailserver:memory.mapped.update_rate 300 + group2;mailserver:postfix_mailstats.delivered.label No .label provided + group2;mailserver:postfix_mailstats.delivered.update_rate 300 + group2;mailserver:postfix_mailstats.delivered.extinfo NOTE: The plugin did not provide any label for the data source delivered. It is in need of fixing. 
+ group2;mailserver:postfix_mailstats.delivered.graph_data_size normal + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_category homematic + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_title Heizung Kummer temp + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_vlabel temp + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_printf %3.0lf + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_args --base 1000 --lower-limit -10 --upper-limit 45 + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.a.value 10 + ''')) + + expected_groups = {'group1', 'group2', 'group3'} + expected_domains = { + 'top.level.domain', 'mailserver', 'blackdragon.fritz.box'} + expected_fields = { + 'a', + 'delivered', + 'template1', + 'irq', + 'system', + 'accesssharelock', + 'volume80', + 'mapped' + } + expected_plugins = { + 'postgres_locks_dbname', + 'cpu', + 'apache_volume', + 'postgres_connections_db', + 'homematic_radiator_kummer_temperature', + 'postfix_mailstats', + 'memory', + } + + settings = Settings() + + self.assertEqual(settings.domains.keys(), []) + + populate_settings(settings, data) + + self.assertEqual(set(settings.domains.keys()), + {'group1', 'group2', 'group3'}) + + plugins = {_[-1] for _ in settings.iter_plugins()} + self.assertEqual(plugins, expected_plugins) + + groups = set() + domains = set() + plugins = set() + fields = set() + for group, domain, plugin, field in settings.iter_fields(): + groups.add(group) + domains.add(domain) + plugins.add(plugin) + fields.add(field) + + self.assertEqual(groups, expected_groups) + self.assertEqual(domains, expected_domains) + self.assertEqual(plugins, expected_plugins) + self.assertEqual(fields, expected_fields) From 86844d1e475fa8c4991df24a6db339a10ced6d5b Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Thu, 23 Mar 2017 06:59:04 +0100 Subject: [PATCH 14/41] Added .cache to gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index dbb6c1b..755428e 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,4 @@ data/ lib/ local/ venv/ +/.cache From 1b96c71c47861087e8e62d3a86285b8ebae8b42a Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Thu, 23 Mar 2017 07:25:02 +0100 Subject: [PATCH 15/41] Further refactoring of settings generation. --- munininfluxdb/munin.py | 39 +++++++++++++++++++++++++-------------- 1 file changed, 25 insertions(+), 14 deletions(-) diff --git a/munininfluxdb/munin.py b/munininfluxdb/munin.py index 858819f..5c0663c 100644 --- a/munininfluxdb/munin.py +++ b/munininfluxdb/munin.py @@ -77,27 +77,22 @@ def populate_settings(settings, datafile): settings.domains[domain].hosts[host].plugins[plugin].fields[field].settings[property] = value -def discover_from_datafile(settings): +def generate_filenames(settings): """ - /var/lib/munin/htmlconf.storable contains a copy of all informations required to build the graph (limits, legend, types...) 
- Parsing it should be much easier and much faster than running munin-run config - - @param filename: usually /var/lib/munin/datafile - @return: settings + Generates RRD and XML Filenames """ - - with open(settings.paths['datafile']) as f: - populate_settings(settings, f) - - # post parsing for domain, host, plugin, field in settings.iter_fields(): _field = settings.domains[domain].hosts[host].plugins[plugin].fields[field] - settings.nb_fields += 1 - type_suffix = _field.settings["type"].lower()[0] _field.rrd_filename = os.path.join(settings.paths['munin'], domain, "{0}-{1}-{2}-{3}.rrd".format(host, plugin.replace(".", "-"), field, type_suffix)) _field.xml_filename = os.path.join(settings.paths['xml'], "{0}-{1}-{2}-{3}-{4}.xml".format(domain, host, plugin.replace(".", "-"), field, type_suffix)) + +def cleanup(settings): + """ + Removes unneeded fields (multigraph intermediaries). + """ + for domain, host, plugin, field in settings.iter_fields(): # remove multigraph intermediates if '.' in plugin: mg_plugin, mg_field = plugin.split(".") @@ -105,10 +100,26 @@ def discover_from_datafile(settings): and mg_field in settings.domains[domain].hosts[host].plugins[mg_plugin].fields: del settings.domains[domain].hosts[host].plugins[mg_plugin].fields[mg_field] - settings.nb_fields -= 1 + + +def discover_from_datafile(settings): + """ + /var/lib/munin/htmlconf.storable contains a copy of all informations required to build the graph (limits, legend, types...) + Parsing it should be much easier and much faster than running munin-run config + + @param filename: usually /var/lib/munin/datafile + @return: settings + """ + + with open(settings.paths['datafile']) as f: + populate_settings(settings, f) + + generate_filenames(settings) + cleanup(settings) return settings + def discover_from_www(settings): """ Builds a Munin dashboard structure (domain/host/plugins) by reading the HTML files From 68c93c5f4e415ea05cdf16f38a67e36bde4adf2a Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Thu, 23 Mar 2017 07:25:21 +0100 Subject: [PATCH 16/41] Added test for RRD and XML filename generation --- munininfluxdb/test/test_munin.py | 95 +++++++++++++++++++++++--------- 1 file changed, 69 insertions(+), 26 deletions(-) diff --git a/munininfluxdb/test/test_munin.py b/munininfluxdb/test/test_munin.py index 280baad..e337aa1 100644 --- a/munininfluxdb/test/test_munin.py +++ b/munininfluxdb/test/test_munin.py @@ -2,37 +2,43 @@ from textwrap import dedent import unittest -from munininfluxdb.munin import populate_settings +from munininfluxdb.munin import ( + generate_filenames, + populate_settings, +) from munininfluxdb.settings import Settings +EXAMPLE_DATA = StringIO(dedent( + u'''\ + version 2.0.19-3 + group1;top.level.domain:postgres_locks_dbname.accesssharelock.type GAUGE + group1;top.level.domain:postgres_connections_db.template1.graph_data_size normal + group1;top.level.domain:cpu.system.info CPU time spent by the kernel in system activities + group1;top.level.domain:cpu.irq.graph_data_size normal + group1;top.level.domain:apache_volume.volume80.label port 80 + group1;top.level.domain:df.graph_vlabel % + group1;top.level.domain:df.graph_title Disk usage in percent + group1;top.level.domain:apache_volume.volume80.type DERIVE + group2;mailserver:memory.mapped.update_rate 300 + group2;mailserver:postfix_mailstats.delivered.label No .label provided + group2;mailserver:postfix_mailstats.delivered.update_rate 300 + group2;mailserver:postfix_mailstats.delivered.extinfo NOTE: The plugin did not provide any label for the data 
source delivered. It is in need of fixing. + group2;mailserver:postfix_mailstats.delivered.graph_data_size normal + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_category homematic + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_title Heizung Kummer temp + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_vlabel temp + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_printf %3.0lf + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_args --base 1000 --lower-limit -10 --upper-limit 45 + group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.a.value 10 + ''')) + class TestDataFileHandling(unittest.TestCase): - def test_populate_settings(self): - data = StringIO(dedent( - u'''\ - version 2.0.19-3 - group1;top.level.domain:postgres_locks_dbname.accesssharelock.type GAUGE - group1;top.level.domain:postgres_connections_db.template1.graph_data_size normal - group1;top.level.domain:cpu.system.info CPU time spent by the kernel in system activities - group1;top.level.domain:cpu.irq.graph_data_size normal - group1;top.level.domain:apache_volume.volume80.label port 80 - group1;top.level.domain:df.graph_vlabel % - group1;top.level.domain:df.graph_title Disk usage in percent - group1;top.level.domain:apache_volume.volume80.type DERIVE - group2;mailserver:memory.mapped.update_rate 300 - group2;mailserver:postfix_mailstats.delivered.label No .label provided - group2;mailserver:postfix_mailstats.delivered.update_rate 300 - group2;mailserver:postfix_mailstats.delivered.extinfo NOTE: The plugin did not provide any label for the data source delivered. It is in need of fixing. - group2;mailserver:postfix_mailstats.delivered.graph_data_size normal - group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_category homematic - group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_title Heizung Kummer temp - group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_vlabel temp - group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_printf %3.0lf - group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.graph_args --base 1000 --lower-limit -10 --upper-limit 45 - group3;blackdragon.fritz.box:homematic_radiator_kummer_temperature.a.value 10 - ''')) + def setUp(self): + EXAMPLE_DATA.seek(0) + def test_populate_settings(self): expected_groups = {'group1', 'group2', 'group3'} expected_domains = { 'top.level.domain', 'mailserver', 'blackdragon.fritz.box'} @@ -60,7 +66,7 @@ def test_populate_settings(self): self.assertEqual(settings.domains.keys(), []) - populate_settings(settings, data) + populate_settings(settings, EXAMPLE_DATA) self.assertEqual(set(settings.domains.keys()), {'group1', 'group2', 'group3'}) @@ -82,3 +88,40 @@ def test_populate_settings(self): self.assertEqual(domains, expected_domains) self.assertEqual(plugins, expected_plugins) self.assertEqual(fields, expected_fields) + + def test_generate_filename(self): + settings = Settings() + populate_settings(settings, EXAMPLE_DATA) + generate_filenames(settings) + + expected_rrd_filenames = { + '/var/lib/munin/group1/top.level.domain-postgres_locks_dbname-accesssharelock-g.rrd', + '/var/lib/munin/group1/top.level.domain-cpu-irq-g.rrd', + '/var/lib/munin/group1/top.level.domain-cpu-system-g.rrd', + '/var/lib/munin/group1/top.level.domain-apache_volume-volume80-d.rrd', + '/var/lib/munin/group1/top.level.domain-postgres_connections_db-template1-g.rrd', 
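+            # All expected names follow the pattern from generate_filenames():
+            # <munin path>/<domain>/<host>-<plugin>-<field>-<type letter>.rrd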
+            '/var/lib/munin/group3/blackdragon.fritz.box-homematic_radiator_kummer_temperature-a-g.rrd',
+            '/var/lib/munin/group2/mailserver-postfix_mailstats-delivered-g.rrd',
+            '/var/lib/munin/group2/mailserver-memory-mapped-g.rrd',
+        }
+
+        expected_xml_filenames = {
+            '/tmp/munin-influxdb/xml/group2-mailserver-memory-mapped-g.xml',
+            '/tmp/munin-influxdb/xml/group2-mailserver-postfix_mailstats-delivered-g.xml',
+            '/tmp/munin-influxdb/xml/group3-blackdragon.fritz.box-homematic_radiator_kummer_temperature-a-g.xml',
+            '/tmp/munin-influxdb/xml/group1-top.level.domain-postgres_connections_db-template1-g.xml',
+            '/tmp/munin-influxdb/xml/group1-top.level.domain-apache_volume-volume80-d.xml',
+            '/tmp/munin-influxdb/xml/group1-top.level.domain-cpu-system-g.xml',
+            '/tmp/munin-influxdb/xml/group1-top.level.domain-cpu-irq-g.xml',
+            '/tmp/munin-influxdb/xml/group1-top.level.domain-postgres_locks_dbname-accesssharelock-g.xml',
+        }
+
+        rrd_filenames = set()
+        xml_filenames = set()
+        for domain, host, plugin, field in settings.iter_fields():
+            _field = settings.domains[domain].hosts[host].plugins[plugin].fields[field]
+            rrd_filenames.add(_field.rrd_filename)
+            xml_filenames.add(_field.xml_filename)
+
+        self.assertEqual(expected_xml_filenames, xml_filenames)
+        self.assertEqual(expected_rrd_filenames, rrd_filenames)
From bc2e102f1b0e6ebf62dc4a1ecb4b1c20c3c1094e Mon Sep 17 00:00:00 2001
From: Michel Albert
Date: Mon, 27 Mar 2017 08:15:32 +0200
Subject: [PATCH 17/41] Added two placeholder tests.

---
 munininfluxdb/test/test_munin.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/munininfluxdb/test/test_munin.py b/munininfluxdb/test/test_munin.py
index e337aa1..dff7fd0 100644
--- a/munininfluxdb/test/test_munin.py
+++ b/munininfluxdb/test/test_munin.py
@@ -38,7 +38,14 @@ class TestDataFileHandling(unittest.TestCase):
     def setUp(self):
         EXAMPLE_DATA.seek(0)
 
-    def test_populate_settings(self):
+    def test_read_state_file(self):
+        self.skipTest("I don't know what the method "
+                      "read_state_file is supposed to do.")  # TODO
+
+    def test_populate_settings_from_www(self):
+        self.skipTest('I have no example HTML file at hand right now')  # TODO
+
+    def test_populate_settings_from_datafile(self):
         expected_groups = {'group1', 'group2', 'group3'}
         expected_domains = {
             'top.level.domain', 'mailserver', 'blackdragon.fritz.box'}
From aae45118215e06906ac7d3669afa5e3fe28ddcc6 Mon Sep 17 00:00:00 2001
From: Michel Albert
Date: Wed, 29 Mar 2017 08:30:14 +0200
Subject: [PATCH 18/41] Tests for Grafana panels and queries & bugfixes

These tests surfaced two bugs which have been fixed with this commit as well.
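
For the record, the two bugs (visible in the diff below): the threshold
code looked for a "warnings" key although the Munin setting is named
"warning", and the stacking branch re-tested hasArea where it should test
hasStack. A sketch of the corrected membership test:

    warnings = {fields[field].settings.get("warning")
                for field in fields if "warning" in fields[field].settings}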
--- munininfluxdb/grafana.py | 4 +- munininfluxdb/test/test_grafana.py | 233 +++++++++++++++++++++++++++++ 2 files changed, 235 insertions(+), 2 deletions(-) create mode 100644 munininfluxdb/test/test_grafana.py diff --git a/munininfluxdb/grafana.py b/munininfluxdb/grafana.py index 99900ac..3d0826c 100644 --- a/munininfluxdb/grafana.py +++ b/munininfluxdb/grafana.py @@ -74,7 +74,7 @@ def process_graph_thresholds(self, fields): @see http://munin-monitoring.org/wiki/fieldname.warning @see http://munin-monitoring.org/wiki/fieldname.critical """ - warnings = {fields[field].settings.get("warning") for field in fields if "warnings" in fields[field].settings} + warnings = {fields[field].settings.get("warning") for field in fields if "warning" in fields[field].settings} criticals = {fields[field].settings.get("critical") for field in fields if "critical" in fields[field].settings} if len(warnings) > 1 or len(criticals) > 1: @@ -112,7 +112,7 @@ def process_graph_types(self, fields): if hasArea: self.fill = 5 self.linewidth = 0 - if hasArea: + if hasStack: self.stack = True # build overrides list diff --git a/munininfluxdb/test/test_grafana.py b/munininfluxdb/test/test_grafana.py new file mode 100644 index 0000000..22e6f3d --- /dev/null +++ b/munininfluxdb/test/test_grafana.py @@ -0,0 +1,233 @@ +try: + from unittest.mock import patch, call, MagicMock + has_mock = True +except ImportError: + try: + from mock import patch, call, MagicMock # NOQA + has_mock = True + except ImportError: + has_mock = False + +import unittest + +import munininfluxdb.grafana as gf + + +DEFAULT_LINE_WIDTH = 1 +DEFAULT_FILL = 5 + + +class TestQuery(unittest.TestCase): + + def test_to_json(self): + + query = gf.Query(1.0, "thefield") + result = query.to_json(None) + + expected = { + "dsType": "influxdb", + "measurement": 1.0, + "select": [ + [ + {"params": ["thefield"], "type": "field"}, + {"params": [], "type": "mean"} + ] + ], + "groupBy": [ + {"params": ["$interval"], "type": "time"}, + {"params": ["null"], "type": "fill"} + ], + "resultFormat": "time_series", + "alias": "thefield" + } + + self.assertEqual(result, expected) + + +class TestPanel(unittest.TestCase): + + def setUp(self): + self.panel = gf.Panel(title="Hello", measurement=1.2) + + def test_add_query(self): + self.assertEqual(len(self.panel.queries), 0) + self.panel.add_query("thefield") + self.assertEqual(len(self.panel.queries), 1) + self.assertEqual(self.panel.queries[0].field, "thefield") + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_sort_queries(self): + a = MagicMock(name='11:30', field='11', field_2=30) + b = MagicMock(name='21:40', field='21', field_2=40) + c = MagicMock(name='10:20', field='10', field_2=20) + d = MagicMock(name='20:10', field='20', field_2=10) + + expected = [c, a, d, b] + + self.panel.queries = [a, b, c, d] + self.panel.sort_queries('10 11 20 21') + + self.assertEqual(self.panel.queries, expected) + + def test_process_graph_settings(self): + plugin_settings = { + 'graph_vlabel': 'vlabel ${graph_period}', + 'graph_period': 'foo', + 'graph_order': 'a b' + } + self.assertIsNone(self.panel.leftYAxisLabel) + self.panel.process_graph_settings(plugin_settings) + self.assertEqual(self.panel.leftYAxisLabel, 'vlabel foo') + + def test_process_graph_settings_default_period(self): + plugin_settings = { + 'graph_vlabel': 'vlabel ${graph_period}', + 'graph_order': 'a b' + } + self.assertIsNone(self.panel.leftYAxisLabel) + self.panel.process_graph_settings(plugin_settings) + 
self.assertEqual(self.panel.leftYAxisLabel, 'vlabel second') + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_thresholds_multiple_warnings(self): + fields = { + 'foo': MagicMock(settings={'warning': '10:20'}), + 'bar': MagicMock(settings={'warning': '20:30'}), + } + result = self.panel.process_graph_thresholds(fields) + self.assertEqual(self.panel.thresholds, {}) + self.assertEqual(result, None) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_thresholds_multiple_criticals(self): + fields = { + 'foo': MagicMock(settings={'critical': '10:20'}), + 'bar': MagicMock(settings={'critical': '20:30'}), + } + result = self.panel.process_graph_thresholds(fields) + self.assertEqual(self.panel.thresholds, {}) + self.assertEqual(result, None) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_thresholds_critical(self): + fields = { + 'foo': MagicMock(settings={'critical': '10:20'}), + } + result = self.panel.process_graph_thresholds(fields) + self.assertEqual(self.panel.thresholds, { + 'threshold1': 20.0, # TODO is this really the expected value? + 'threshold2': 20.0, + 'thresholdLine': False + }) + self.assertEqual(result, None) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_thresholds_warning(self): + fields = { + 'bar': MagicMock(settings={'warning': '20:30'}), + } + result = self.panel.process_graph_thresholds(fields) + self.assertEqual(self.panel.thresholds, { + 'threshold1': 30.0, + 'thresholdLine': False + }) + self.assertEqual(result, None) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_types_stack(self): + fields = { + 'baz': MagicMock(settings={'draw': 'STACK'}), + } + result = self.panel.process_graph_types(fields) + self.assertTrue(self.panel.stack) + self.assertEqual(self.panel.linewidth, DEFAULT_LINE_WIDTH) + self.assertEqual(self.panel.fill, 0) + self.assertEqual(self.panel.overrides, []) + self.assertEqual(self.panel.alias_colors, {}) + self.assertIsNone(result) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_types_area(self): + fields = { + 'bar': MagicMock(settings={'draw': 'AREA'}), + } + result = self.panel.process_graph_types(fields) + self.assertFalse(self.panel.stack) + self.assertEqual(self.panel.fill, DEFAULT_FILL) + self.assertEqual(self.panel.linewidth, 0) + self.assertEqual(self.panel.overrides, []) + self.assertEqual(self.panel.alias_colors, {}) + self.assertIsNone(result) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_types_line_area(self): + fields = { + 'foo': MagicMock(settings={'draw': 'LINEAREA'}), + } + result = self.panel.process_graph_types(fields) + self.assertFalse(self.panel.stack) + self.assertEqual(self.panel.fill, DEFAULT_FILL) + self.assertEqual(self.panel.linewidth, 0) + self.assertEqual(self.panel.overrides, [{ + 'alias': 'foo', + 'fill': 0 # Is this really the expected value? 
+ }]) + self.assertEqual(self.panel.alias_colors, {}) + self.assertIsNone(result) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_types_default(self): + fields = { + 'foo': MagicMock(settings={}), + } + result = self.panel.process_graph_types(fields) + self.assertFalse(self.panel.stack) + self.assertEqual(self.panel.fill, 0) + self.assertEqual(self.panel.linewidth, DEFAULT_LINE_WIDTH) + self.assertEqual(self.panel.overrides, []) + self.assertEqual(self.panel.alias_colors, {}) + self.assertIsNone(result) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_types_line(self): + fields = { + 'foo': MagicMock(settings={'draw': 'LINE'}), + } + result = self.panel.process_graph_types(fields) + self.assertFalse(self.panel.stack) + self.assertEqual(self.panel.fill, 0) + self.assertEqual(self.panel.linewidth, DEFAULT_LINE_WIDTH) + self.assertEqual(self.panel.overrides, []) + self.assertEqual(self.panel.alias_colors, {}) + self.assertIsNone(result) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_types_linestack(self): + fields = { + 'foo': MagicMock(settings={'draw': 'LINESTACK'}), + } + result = self.panel.process_graph_types(fields) + self.assertTrue(self.panel.stack) + self.assertEqual(self.panel.fill, 0) + self.assertEqual(self.panel.linewidth, DEFAULT_LINE_WIDTH) + self.assertEqual(self.panel.overrides, []) + self.assertEqual(self.panel.alias_colors, {}) + self.assertIsNone(result) + + @unittest.skipUnless(has_mock, "unittest.mock is not available.") + def test_process_graph_types_colours(self): + fields = { + 'foo': MagicMock(settings={'colour': '123456'}), + } + result = self.panel.process_graph_types(fields) + self.assertFalse(self.panel.stack) + self.assertEqual(self.panel.fill, 0) + self.assertEqual(self.panel.linewidth, DEFAULT_LINE_WIDTH) + self.assertEqual(self.panel.overrides, []) + self.assertEqual(self.panel.alias_colors, { + 'foo': '#123456' + }) + self.assertIsNone(result) + + def test_to_json(self): + self.skipTest('TODO') From 88cc02b1ce2f93016ecae7e57bb1cc3ff83d061f Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Mon, 3 Apr 2017 08:23:35 +0200 Subject: [PATCH 19/41] Moved "ask_password" from influxdbclient to utils. It was running under the namespace "InfluxdbClient" yet it had nothing to do with it. 
This makes it more versatile and removes a dependency on InfluxdbClient
from `grafana.py`
---
 munininfluxdb/commands/import_.py | 4 ++--
 munininfluxdb/grafana.py          | 5 ++---
 munininfluxdb/influxdbclient.py   | 5 -----
 munininfluxdb/utils.py            | 5 +++++
 4 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/munininfluxdb/commands/import_.py b/munininfluxdb/commands/import_.py
index 06ceade..ea1c3f5 100755
--- a/munininfluxdb/commands/import_.py
+++ b/munininfluxdb/commands/import_.py
@@ -6,7 +6,7 @@
 from munininfluxdb.settings import Settings, Defaults
 from munininfluxdb.influxdbclient import InfluxdbClient
 from munininfluxdb.grafana import Dashboard
-from munininfluxdb.utils import Color, Symbol, prompt
+from munininfluxdb.utils import Color, Symbol, prompt, ask_password
 
 LOG = logging.getLogger(__name__)
 
@@ -42,7 +42,7 @@ def main(args):
     else:
         # even in non-interactive mode, we ask for the password if empty
         if not exporter.settings.influxdb['password']:
-            exporter.settings.influxdb['password'] = InfluxdbClient.ask_password()
+            exporter.settings.influxdb['password'] = ask_password()
 
     exporter.connect()
     exporter.test_db(exporter.settings.influxdb['database'])  # needed to create db if missing
diff --git a/munininfluxdb/grafana.py b/munininfluxdb/grafana.py
index 3d0826c..c666866 100644
--- a/munininfluxdb/grafana.py
+++ b/munininfluxdb/grafana.py
@@ -3,10 +3,9 @@
 import json
 import urlparse
 
-from utils import ProgressBar, Color, Symbol
+from utils import ProgressBar, Color, Symbol, ask_password
 from pprint import pprint
 from settings import Settings
-from influxdbclient import InfluxdbClient
 
 import requests
 
@@ -238,7 +237,7 @@ def prompt_setup(self):
         if GrafanaApi.test_host(setup['host']):
             while not GrafanaApi.test_auth(setup['host'], setup['auth']):
                 user = raw_input("  - user [admin]: ").strip() or "admin"
-                password = InfluxdbClient.ask_password()
+                password = ask_password()
 
         setup['auth'] = (user, password)
         setup['access'] = None
diff --git a/munininfluxdb/influxdbclient.py b/munininfluxdb/influxdbclient.py
index 33dd306..56b03e2 100644
--- a/munininfluxdb/influxdbclient.py
+++ b/munininfluxdb/influxdbclient.py
@@ -1,6 +1,5 @@
 from __future__ import print_function
 import os
-import getpass
 import json
 from collections import defaultdict
 from pprint import pprint
@@ -107,10 +106,6 @@ def list_columns(self, series="/.*/"):
 
         return res
 
-    @staticmethod
-    def ask_password():
-        return getpass.getpass("  - password: ")
-
     def prompt_setup(self):
         setup = self.settings.influxdb
         print("\n{0}InfluxDB: Please enter your connection information{1}".format(Color.BOLD, Color.CLEAR))
diff --git a/munininfluxdb/utils.py b/munininfluxdb/utils.py
index 9bf3f08..c22fc1a 100644
--- a/munininfluxdb/utils.py
+++ b/munininfluxdb/utils.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 from __future__ import print_function
 from os.path import join, basename
+from getpass import getpass
 import sys
 
 
@@ -56,6 +57,10 @@ def absolute_executable():
     return join(sys.prefix, 'bin', basename(sys.argv[0]))
 
 
+def ask_password():
+    return getpass("  - password: ")
+
+
 def parse_handle(handle):
     """
     Parses a connection handle to get it's subparts (user, password, host, port, dbname)
From 6fd6253af2601cc95a72b53822e77b1f0afc8023 Mon Sep 17 00:00:00 2001
From: Michel Albert
Date: Tue, 4 Apr 2017 08:16:11 +0200
Subject: [PATCH 20/41] Centralized "mock" import (py2/py3)

---
 munininfluxdb/test/__init__.py       |  7 +++
 munininfluxdb/test/test_externals.py | 26 ++++-------
 munininfluxdb/test/test_grafana.py   | 70 ++++++++++++----------------
 3 files changed, 
47 insertions(+), 56 deletions(-) diff --git a/munininfluxdb/test/__init__.py b/munininfluxdb/test/__init__.py index e69de29..2de2cd3 100644 --- a/munininfluxdb/test/__init__.py +++ b/munininfluxdb/test/__init__.py @@ -0,0 +1,7 @@ +try: + import unittest.mock as mock +except ImportError: + try: + import mock + except ImportError: + mock = None diff --git a/munininfluxdb/test/test_externals.py b/munininfluxdb/test/test_externals.py index 0334f07..069f483 100644 --- a/munininfluxdb/test/test_externals.py +++ b/munininfluxdb/test/test_externals.py @@ -6,18 +6,10 @@ """ import unittest -try: - from unittest.mock import patch, call, MagicMock - has_mock = True -except ImportError: - try: - from mock import patch, call, MagicMock - has_mock = True - except ImportError: - has_mock = False - - -@unittest.skipUnless(has_mock, "unittest.mock is not available.") +from . import mock + + +@unittest.skipUnless(mock, "unittest.mock is not available.") class TestCron(unittest.TestCase): """ Test Case for CRON commands. @@ -25,11 +17,11 @@ class TestCron(unittest.TestCase): def test_install(self): from munininfluxdb.external import cron - with patch('munininfluxdb.external.cron.crontab') as ptch: - cron_instance = MagicMock() + with mock.patch('munininfluxdb.external.cron.crontab') as ptch: + cron_instance = mock.MagicMock() ptch.CronTab.return_value = cron_instance - cmd_instance = MagicMock() + cmd_instance = mock.MagicMock() cmd_instance.is_valid.return_value = True cmd_instance.is_enabled.return_value = True cron_instance.new.return_value = cmd_instance @@ -46,8 +38,8 @@ def test_install(self): def test_uninstall(self): from munininfluxdb.external import cron - with patch('munininfluxdb.external.cron.crontab') as ptch: - cron_instance = MagicMock() + with mock.patch('munininfluxdb.external.cron.crontab') as ptch: + cron_instance = mock.MagicMock() ptch.CronTab.return_value = cron_instance cron_instance.find_comment.return_value = [1, 2, 3] diff --git a/munininfluxdb/test/test_grafana.py b/munininfluxdb/test/test_grafana.py index 22e6f3d..654b48b 100644 --- a/munininfluxdb/test/test_grafana.py +++ b/munininfluxdb/test/test_grafana.py @@ -1,17 +1,9 @@ -try: - from unittest.mock import patch, call, MagicMock - has_mock = True -except ImportError: - try: - from mock import patch, call, MagicMock # NOQA - has_mock = True - except ImportError: - has_mock = False - import unittest import munininfluxdb.grafana as gf +from . 
import mock + DEFAULT_LINE_WIDTH = 1 DEFAULT_FILL = 5 @@ -55,12 +47,12 @@ def test_add_query(self): self.assertEqual(len(self.panel.queries), 1) self.assertEqual(self.panel.queries[0].field, "thefield") - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_sort_queries(self): - a = MagicMock(name='11:30', field='11', field_2=30) - b = MagicMock(name='21:40', field='21', field_2=40) - c = MagicMock(name='10:20', field='10', field_2=20) - d = MagicMock(name='20:10', field='20', field_2=10) + a = mock.MagicMock(name='11:30', field='11', field_2=30) + b = mock.MagicMock(name='21:40', field='21', field_2=40) + c = mock.MagicMock(name='10:20', field='10', field_2=20) + d = mock.MagicMock(name='20:10', field='20', field_2=10) expected = [c, a, d, b] @@ -88,30 +80,30 @@ def test_process_graph_settings_default_period(self): self.panel.process_graph_settings(plugin_settings) self.assertEqual(self.panel.leftYAxisLabel, 'vlabel second') - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_thresholds_multiple_warnings(self): fields = { - 'foo': MagicMock(settings={'warning': '10:20'}), - 'bar': MagicMock(settings={'warning': '20:30'}), + 'foo': mock.MagicMock(settings={'warning': '10:20'}), + 'bar': mock.MagicMock(settings={'warning': '20:30'}), } result = self.panel.process_graph_thresholds(fields) self.assertEqual(self.panel.thresholds, {}) self.assertEqual(result, None) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_thresholds_multiple_criticals(self): fields = { - 'foo': MagicMock(settings={'critical': '10:20'}), - 'bar': MagicMock(settings={'critical': '20:30'}), + 'foo': mock.MagicMock(settings={'critical': '10:20'}), + 'bar': mock.MagicMock(settings={'critical': '20:30'}), } result = self.panel.process_graph_thresholds(fields) self.assertEqual(self.panel.thresholds, {}) self.assertEqual(result, None) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_thresholds_critical(self): fields = { - 'foo': MagicMock(settings={'critical': '10:20'}), + 'foo': mock.MagicMock(settings={'critical': '10:20'}), } result = self.panel.process_graph_thresholds(fields) self.assertEqual(self.panel.thresholds, { @@ -121,10 +113,10 @@ def test_process_graph_thresholds_critical(self): }) self.assertEqual(result, None) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_thresholds_warning(self): fields = { - 'bar': MagicMock(settings={'warning': '20:30'}), + 'bar': mock.MagicMock(settings={'warning': '20:30'}), } result = self.panel.process_graph_thresholds(fields) self.assertEqual(self.panel.thresholds, { @@ -133,10 +125,10 @@ def test_process_graph_thresholds_warning(self): }) self.assertEqual(result, None) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_types_stack(self): fields = { - 'baz': MagicMock(settings={'draw': 'STACK'}), + 'baz': mock.MagicMock(settings={'draw': 'STACK'}), } result = self.panel.process_graph_types(fields) self.assertTrue(self.panel.stack) @@ -146,10 +138,10 @@ def 
test_process_graph_types_stack(self): self.assertEqual(self.panel.alias_colors, {}) self.assertIsNone(result) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_types_area(self): fields = { - 'bar': MagicMock(settings={'draw': 'AREA'}), + 'bar': mock.MagicMock(settings={'draw': 'AREA'}), } result = self.panel.process_graph_types(fields) self.assertFalse(self.panel.stack) @@ -159,10 +151,10 @@ def test_process_graph_types_area(self): self.assertEqual(self.panel.alias_colors, {}) self.assertIsNone(result) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_types_line_area(self): fields = { - 'foo': MagicMock(settings={'draw': 'LINEAREA'}), + 'foo': mock.MagicMock(settings={'draw': 'LINEAREA'}), } result = self.panel.process_graph_types(fields) self.assertFalse(self.panel.stack) @@ -175,10 +167,10 @@ def test_process_graph_types_line_area(self): self.assertEqual(self.panel.alias_colors, {}) self.assertIsNone(result) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_types_default(self): fields = { - 'foo': MagicMock(settings={}), + 'foo': mock.MagicMock(settings={}), } result = self.panel.process_graph_types(fields) self.assertFalse(self.panel.stack) @@ -188,10 +180,10 @@ def test_process_graph_types_default(self): self.assertEqual(self.panel.alias_colors, {}) self.assertIsNone(result) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_types_line(self): fields = { - 'foo': MagicMock(settings={'draw': 'LINE'}), + 'foo': mock.MagicMock(settings={'draw': 'LINE'}), } result = self.panel.process_graph_types(fields) self.assertFalse(self.panel.stack) @@ -201,10 +193,10 @@ def test_process_graph_types_line(self): self.assertEqual(self.panel.alias_colors, {}) self.assertIsNone(result) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_types_linestack(self): fields = { - 'foo': MagicMock(settings={'draw': 'LINESTACK'}), + 'foo': mock.MagicMock(settings={'draw': 'LINESTACK'}), } result = self.panel.process_graph_types(fields) self.assertTrue(self.panel.stack) @@ -214,10 +206,10 @@ def test_process_graph_types_linestack(self): self.assertEqual(self.panel.alias_colors, {}) self.assertIsNone(result) - @unittest.skipUnless(has_mock, "unittest.mock is not available.") + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_process_graph_types_colours(self): fields = { - 'foo': MagicMock(settings={'colour': '123456'}), + 'foo': mock.MagicMock(settings={'colour': '123456'}), } result = self.panel.process_graph_types(fields) self.assertFalse(self.panel.stack) From f94fe40a2df4aa831c5ab84bac46f29689cbd193 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 5 Apr 2017 21:19:07 +0200 Subject: [PATCH 21/41] Making it easier to mock influxdb --- munininfluxdb/influxdbclient.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/munininfluxdb/influxdbclient.py b/munininfluxdb/influxdbclient.py index 56b03e2..e2fc3b2 100644 --- a/munininfluxdb/influxdbclient.py +++ b/munininfluxdb/influxdbclient.py @@ -5,6 +5,7 @@ from pprint import pprint import influxdb 
+from influxdb.client import InfluxDBClientError try: # poor man's check assert influxdb.__version__[0] not in ('0', '1') @@ -33,7 +34,7 @@ def connect(self, silent=False): # dummy request to test connection client.get_list_database() - except influxdb.client.InfluxDBClientError as e: + except InfluxDBClientError as e: self.client, self.valid = None, False if not silent: print(" {0} Could not connect to database: {1}".format(Symbol.WARN_YELLOW, e)) @@ -62,20 +63,20 @@ def test_db(self, name): try: self.client.create_database(name) - except influxdb.client.InfluxDBClientError as e: + except InfluxDBClientError as e: print("Error: could not create database: %s" % e) return False try: self.client.switch_database(name) - except influxdb.client.InfluxDBClientError as e: + except InfluxDBClientError as e: print("Error: could not select database: %s" % e) return False # dummy query to test db try: res = self.client.query('show series') - except influxdb.client.InfluxDBClientError as e: + except InfluxDBClientError as e: print("Error: could not query database: %s" % e) return False @@ -157,7 +158,7 @@ def write_series(self, measurement, tags, fields, time_and_values): if body: try: self.client.write_points(body, time_precision='s') - except influxdb.client.InfluxDBClientError as e: + except InfluxDBClientError as e: raise Exception("Cannot insert in {0} series: {1}".format(measurement, e)) else: raise ValueError("Measurement {0} did not contain any non-null value".format(measurement)) @@ -180,7 +181,7 @@ def validate_record(self, name, fields): try: res = self.client.query("SELECT COUNT(\"{0}\") FROM \"{1}\"".format(field, name)) assert len(res) >= 0 - except influxdb.client.InfluxDBClientError as e: + except InfluxDBClientError as e: raise Exception(str(e)) except Exception as e: raise Exception("Field \"{}\" in measurement {} doesn't exist. (May happen if original data contains only NaN entries)".format(field, name)) From 6a059d8ffded4ca8f9104aa84b66b46917348696 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 5 Apr 2017 21:19:58 +0200 Subject: [PATCH 22/41] Replaced no-op string statement with code-comment. Using a string literal here leads to incorrect exception messages when the key "group_fields" does not exist (or any other exception on the preceding line). Using a proper code-comment remedies this. --- munininfluxdb/influxdbclient.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/munininfluxdb/influxdbclient.py b/munininfluxdb/influxdbclient.py index e2fc3b2..e98b7a3 100644 --- a/munininfluxdb/influxdbclient.py +++ b/munininfluxdb/influxdbclient.py @@ -217,19 +217,17 @@ def _upload_and_validate(measurement, tags, fields, packed_values): print(" {0} Connection to database \"{1}\" OK".format(Symbol.OK_GREEN, self.settings.influxdb['database'])) if self.settings.influxdb['group_fields']: - """ - In "group_fields" mode, all fields of a same plugin (ex: system, user, nice, idle... of CPU usage) - will be represented as columns of the same time series in InfluxDB. - - Schema will be: - +----------------------+-------+----------+----------+-----------+ - | time_series_name | col_0 | col_1 | col_2 | col_3 ... | - +----------------------+-------+----------+----------+-----------+ - | domain.host.plugin | time | metric_1 | metric_2 | metric_3 | - | acadis.org.tesla.cpu | time | system | user | nice | - | ... 
| | | | | - +----------------------+-------+----------+----------+-----------+ - """ + # In "group_fields" mode, all fields of a same plugin (ex: system, user, nice, idle... of CPU usage) + # will be represented as columns of the same time series in InfluxDB. + + # Schema will be: + # +----------------------+-------+----------+----------+-----------+ + # | time_series_name | col_0 | col_1 | col_2 | col_3 ... | + # +----------------------+-------+----------+----------+-----------+ + # | domain.host.plugin | time | metric_1 | metric_2 | metric_3 | + # | acadis.org.tesla.cpu | time | system | user | nice | + # | ... | | | | | + # +----------------------+-------+----------+----------+-----------+ for domain, host, plugin in self.settings.iter_plugins(): _plugin = self.settings.domains[domain].hosts[host].plugins[plugin] measurement = plugin From 49d7ecf5dc7d883d97a140395f2ff9d6d6a7c2b3 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Tue, 11 Apr 2017 18:15:54 +0200 Subject: [PATCH 23/41] Added a bunch of tests. --- munininfluxdb/test/test_influxdbclient.py | 265 ++++++++++++++++++++++ munininfluxdb/test/test_rfetch.py | 10 + munininfluxdb/test/test_rrd.py | 246 ++++++++++++++++++++ 3 files changed, 521 insertions(+) create mode 100644 munininfluxdb/test/test_influxdbclient.py create mode 100644 munininfluxdb/test/test_rfetch.py create mode 100644 munininfluxdb/test/test_rrd.py diff --git a/munininfluxdb/test/test_influxdbclient.py b/munininfluxdb/test/test_influxdbclient.py new file mode 100644 index 0000000..4b96a6e --- /dev/null +++ b/munininfluxdb/test/test_influxdbclient.py @@ -0,0 +1,265 @@ +import unittest + +from munininfluxdb.influxdbclient import InfluxdbClient + +from . import mock + + +@unittest.skipUnless(mock, "unittest.mock is not available.") +class TestInfluxDBClient(unittest.TestCase): + + def setUp(self): + self.patcher = mock.patch('munininfluxdb.influxdbclient.influxdb') + self.__influxdb = self.patcher.start() + self.mock_settings = mock.MagicMock() + self.mock_settings.influxdb = { + 'database': 'foo', + 'host': 'host', + 'port': 123, + 'user': 'user', + 'password': 'password', + } + self.mock_settings.interactive = False + + def tearDown(self): + self.patcher.stop() + + def test_connect(self): + client = InfluxdbClient(self.mock_settings) + result = client.connect() + self.__influxdb.assert_has_calls([ + mock.call.InfluxDBClient('host', 123, 'user', 'password'), + mock.call.InfluxDBClient().get_list_database(), + mock.call.InfluxDBClient().switch_database('foo'), + ]) + self.assertIsNotNone(client.client) + self.assertTrue(result) + + def test_connect_default_db(self): + ''' + If the "database" setting in the settings is empty, use the default DB. 
+ ''' + self.mock_settings.influxdb['database'] = '' + client = InfluxdbClient(self.mock_settings) + result = client.connect() + self.__influxdb.assert_has_calls([ + mock.call.InfluxDBClient('host', 123, 'user', 'password'), + mock.call.InfluxDBClient().get_list_database(), + ]) + self.assertIsNotNone(client.client) + self.assertTrue(result) + + def test_test_db_existing(self): + client = InfluxdbClient(self.mock_settings) + client.connect() + client.client.get_list_database().__contains__.return_value = True + client.test_db('bla') + client.client.assert_has_calls([ + mock.call.switch_database('bla'), + ]) + + def test_test_db_nonexisting(self): + client = InfluxdbClient(self.mock_settings) + client.connect() + client.client.get_list_database().__contains__.return_value = False + client.test_db('bla') + client.client.assert_has_calls([ + mock.call.create_database('bla'), + mock.call.switch_database('bla'), + ]) + + def test_test_db_error_on_create(self): + from influxdb.client import InfluxDBClientError as IE + client = InfluxdbClient(self.mock_settings) + client.connect() + client.client.create_database.side_effect = IE('foo') + result = client.test_db('bla') + self.assertFalse(result) + + def test_test_db_error_on_switch(self): + from influxdb.client import InfluxDBClientError as IE + client = InfluxdbClient(self.mock_settings) + client.connect() + client.client.switch_database.side_effect = IE('foo') + result = client.test_db('bla') + self.assertFalse(result) + + def test_test_db_error_on_query(self): + from influxdb.client import InfluxDBClientError as IE + client = InfluxdbClient(self.mock_settings) + client.connect() + client.client.query.side_effect = IE('foo') + result = client.test_db('bla') + self.assertFalse(result) + + def test_list_db(self): + client = InfluxdbClient(self.mock_settings) + client.connect() + client.client.get_list_database.return_value = [ + {'name': 'db1'}, + {'name': 'db2'}, + ] + with mock.patch('munininfluxdb.influxdbclient.print') as p: + result = client.list_db() + self.assertEqual(len(p.mock_calls), 3) + self.assertIsNone(result) + + def test_list_series(self): + client = InfluxdbClient(self.mock_settings) + client.connect() + token = object() + client.client.get_list_series.return_value = token + result = client.list_series() + self.assertEqual(result, token) + client.client.get_list_series.assert_called_with() + + def test_list_columns(self): + client = InfluxdbClient(self.mock_settings) + client.connect() + client.client.query.return_value = [{ + 'a': 1, + 'points': 123, + 'columns': ['foo', 'time', 'sequence_number'] + }] + + expected = [{ + 'a': 1, + 'columns': ['foo'] + }] + result = client.list_columns() + client.client.query.assert_called_with('SELECT * FROM "/.*/" LIMIT 1') + self.assertEqual(result, expected) + + def test_prompt_setup(self): + self.skipTest('Unittesting interactive prompts is a bit hairy. ' + 'Skipping for now.') + + def test_write_series(self): + # TODO The exceptions in this function are not yet tested. 
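+ # Expected mapping (asserted below): the first entry of each row is the point's timestamp, the remaining field names are zipped with the remaining row values, and the result is handed to client.write_points(..., time_precision='s') as a single point.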
+ client = InfluxdbClient(self.mock_settings) + client.connect() + + measurement = 'measurement' + tags = ['tag1', 'tag2'] + fields = ['field1', 'column1', 'column2'] + timeval1 = 'the-time-value' + timeval2 = 'col-1-value' + timeval3 = 'col-2-value' + time_and_values = [ + [timeval1, timeval2, timeval3] + ] + + expected_payload = { + 'fields': {'column1': 'col-1-value', 'column2': 'col-2-value'}, + 'tags': ['tag1', 'tag2'], + 'time': 'the-time-value', + 'measurement': 'measurement' + } + result = client.write_series( + measurement, tags, fields, time_and_values) + + client.client.write_points.assert_called_with( + [expected_payload], time_precision='s') + self.assertIsNone(result) + + def test_validate_record(self): + # TODO The exceptions in this function are not yet tested. + client = InfluxdbClient(self.mock_settings) + client.connect() + + # Query is called multiple times, using side_effect to define + # return_values for each call. + client.client.query.side_effect = [ + ['a', 'b', 'c'], # SHOW MEASUREMENT query + [10], # COUNT('field1') + [20], # COUNT('field2') + ] + + client.validate_record('the-name', ['field1', 'field2']) + + client.client.query.assert_has_calls([ + mock.call('SHOW MEASUREMENTS WITH MEASUREMENT="the-name"'), + mock.call('SELECT COUNT("field1") FROM "the-name"'), + mock.call('SELECT COUNT("field2") FROM "the-name"'), + ]) + + def test_import_from_xml(self): + # --- Prepare the "settings" for this test ------ + self.mock_settings.influxdb['group_fields'] = 'a' + self.mock_settings.iter_plugins.return_value = [ + ('a', 'b', 'c'), + ('d', 'e', 'f'), + ] + plugin_c = mock.MagicMock( + name='plugin_c', + fields={'field_ca': mock.MagicMock(rrd_exported=True)} + ) + plugin_f = mock.MagicMock( + name='plugin_f', + fields={'field_fa': mock.MagicMock(rrd_exported=True)} + ) + self.mock_settings.domains = { + 'a': mock.MagicMock( + hosts={ + 'b': mock.MagicMock( + plugins={'c': plugin_c} + ) + } + ), + 'd': mock.MagicMock( + hosts={ + 'e': mock.MagicMock( + plugins={'f': plugin_f} + ) + } + ), + } + + # --- Create the testing instance. --------- + client = InfluxdbClient(self.mock_settings) + client.connect() + + # --- Mocking ----------- + # We'll mock out "write_series". It's tested in another test. + # + # ProgressBar and read_xml_file are externals and we don't want to + # execute them. + # + # We'll also mock out "print". This will remove stdout during testing. + # If we wanted, we could assign the patch result to a variable and check + # the calls. We don't do this as it's end-user "display" stuff and not + # relevant to the underlying logic. 
+ with mock.patch('munininfluxdb.influxdbclient.print'), \ + mock.patch('munininfluxdb.influxdbclient.read_xml_file') as rxml, \ + mock.patch('munininfluxdb.influxdbclient.ProgressBar'), \ + mock.patch('munininfluxdb.influxdbclient.InfluxdbClient.write_series') as ptch: + + rxml.return_value = mock.MagicMock() + rxml().items.return_value = [('key', 'value')] + client.import_from_xml() + + ptch.assert_has_calls([ + mock.call( + 'c', + { + 'domain': 'a', + 'is_multigraph': True, + 'host': 'b', + 'plugin': 'c' + }, + ['time', 'field_ca'], + [['key', 'value']]), + mock.call( + 'f', + { + 'domain': 'd', + 'is_multigraph': True, + 'host': 'e', + 'plugin': 'f' + }, + ['time', 'field_fa'], + [['key', 'value']]) + ]) + + def test_import_from_xml_folder(self): + self.skipTest('Deprecated') diff --git a/munininfluxdb/test/test_rfetch.py b/munininfluxdb/test/test_rfetch.py new file mode 100644 index 0000000..1a02689 --- /dev/null +++ b/munininfluxdb/test/test_rfetch.py @@ -0,0 +1,10 @@ +import unittest + +from . import mock + + +@unittest.skipUnless(mock, "unittest.mock is not available.") +class TestRFetch(unittest.TestCase): + + def test_main(self): + self.skipTest('This function does not seem to do anything useful!') diff --git a/munininfluxdb/test/test_rrd.py b/munininfluxdb/test/test_rrd.py new file mode 100644 index 0000000..4a5ddf4 --- /dev/null +++ b/munininfluxdb/test/test_rrd.py @@ -0,0 +1,246 @@ +import unittest + +import munininfluxdb.rrd as rrd + +from . import mock + + +class MockRRA: + + def __init__(self, pdp_per_row): + self.pdp_per_row = pdp_per_row + + +@unittest.skipUnless(mock, "unittest.mock is not available.") +class TestRFetch(unittest.TestCase): + + def setUp(self): + self.os_patcher = mock.patch('munininfluxdb.rrd.os') + self.sp_patcher = mock.patch('munininfluxdb.rrd.subprocess') + self.pb_patcher = mock.patch('munininfluxdb.rrd.ProgressBar') + self.print_patcher = mock.patch('munininfluxdb.rrd.print') + self.__os = self.os_patcher.start() + self.__subprocess = self.sp_patcher.start() + self.__ProgressBar = self.pb_patcher.start() + self.__print = self.print_patcher.start() + + def tearDown(self): + self.os_patcher.stop() + self.sp_patcher.stop() + self.pb_patcher.stop() + self.print_patcher.stop() + + def test_read_xml_file(self): + with mock.patch('munininfluxdb.rrd.ET') as mock_et: + mock_tree = mock.MagicMock(name='tree') + mock_et.parse.return_value = mock_tree + mock_root = mock.MagicMock(name='root') + mock_root.find.side_effect = [ + mock.MagicMock(text='1000'), # find('lastupdate') + mock.MagicMock(text='5'), # find('step') + ] + rra1 = mock.MagicMock(name='rra1') + rra1.find.side_effect = [ + mock.MagicMock(text='3'), # find('pdp_per_row') + [4, 5], # find('database') + ] + rra1.findall.return_value = [mock.MagicMock(text='10.0')] + rra2 = mock.MagicMock(name='rra2') + rra2.find.side_effect = [ + mock.MagicMock(text='5'), # find('pdp_per_row') + [4, 5, 6], # find('database') + ] + rra2.findall.return_value = [mock.MagicMock(text='15.0')] + mock_root.findall.side_effect = [ + ['ds1'], # findall('ds') + [rra1, rra2] # findall('rra') + ] + mock_tree.getroot.return_value = mock_root + + result = rrd.read_xml_file('myfilename', keep_average_only=False) + mock_tree.getroot.assert_called_with() + mock_root.assert_has_calls([ + mock.call.find('lastupdate'), + mock.call.find('step'), + mock.call.findall('ds'), + mock.call.findall('rra'), + ]) + + rra1.assert_has_calls([ + mock.call.find('pdp_per_row'), + mock.call.find('database'), + mock.call.findall('./database/row/v'), + 
]) + + rra2.assert_has_calls([ + mock.call.find('pdp_per_row'), + mock.call.find('database'), + mock.call.findall('./database/row/v'), + ]) + + expected = { + 950: 15.0, + 975: 10.0, + } + self.assertEqual(dict(result), expected) + + def test_export_to_xml(self): + field1 = mock.MagicMock(name='field1', rrd_filename='rrdfile1.rrd', + xml_filename='xmlfile1') + field2 = mock.MagicMock(name='field2', rrd_filename='rrdfile2.rrd', + xml_filename='xmlfile2') + mock_settings = mock.MagicMock( + paths={ + 'xml': '/path/to/xml' + }, + domains={ + 'domain': mock.MagicMock( + hosts={ + 'host': mock.MagicMock( + plugins={ + 'plugin': mock.MagicMock( + fields={ + 'field1': field1, + 'field2': field2, + } + ) + } + ) + } + ) + } + ) + mock_settings.iter_fields.return_value = [ + ('domain', 'host', 'plugin', 'field1'), + ('domain', 'host', 'plugin', 'field2'), + ] + + rrd.export_to_xml(mock_settings) + + self.__subprocess.check_call.assert_has_calls([ + mock.call(['rrdtool', 'dump', 'rrdfile1.rrd', 'xmlfile1']), + mock.call(['rrdtool', 'dump', 'rrdfile2.rrd', 'xmlfile2']), + ], any_order=True) + self.__os.makedirs.assert_called_with('/path/to/xml') + + def test_export_to_xml_in_folder(self): + self.__os.listdir.return_value = [ + 'file1.rrd', + 'file2.rrd', + 'file3', + ] + self.__os.path.join.side_effect = lambda *x: '/'.join(x) + rrd.export_to_xml_in_folder('/path/to/source') + + self.skipTest('The function under test does not seem to be implemented ' + 'correctly. Test skipped for now.') + # TODO these calls don't look correct, but that's what's currently + # implemented. For now, I'm focussing on creating tests, not the + # correctness of the code! This needs to be fixed before re-enabling the + # test! + self.__subprocess.check_call.assert_has_calls([ + mock.call(['rrdtool', 'dump', '/path/to/source///path/to/source/file1.rrd', '/tmp/munin-influxdb/xml/-/path/to/source/file1.xml']), + mock.call(['rrdtool', 'dump', '/path/to/source///path/to/source/file2.rrd', '/tmp/munin-influxdb/xml/-/path/to/source/file2.xml']), + ], any_order=True) + + def test_discover_from_rrd(self): + field = mock.MagicMock( + name='field', + rrd_filename='rrdfile1.rrd', + xml_filename='xmlfile1' + ) + plugin = mock.MagicMock(fields={ + 'field': field, + }) + mock_settings = mock.MagicMock( + nb_rrd_files=0, + paths={ + 'xml': '/path/to/xml', + 'munin': '/path/to/munin', + }, + domains={ + 'domain': mock.MagicMock( + hosts={ + 'host': mock.MagicMock( + plugins={ + 'plugin': plugin + } + ) + } + ) + } + ) + mock_settings.iter_fields.return_value = [ + ('domain', 'host', 'plugin', 'field1'), + ('domain', 'host', 'plugin', 'field2'), + ] + + # --- prime the os mock ---------- + self.__os.listdir.return_value = [ + 'file1', + 'host-plugin-field-d.rrd', + 'domain', + ] + self.__os.path.isdir.side_effect = lambda x: x == '/path/to/munin/domain' + self.__os.path.join.side_effect = lambda *x: '/'.join(x) + self.__os.path.splitext.return_value = ('host-plugin-field-d', 'rrd') + # -------------------------------- + + result = rrd.discover_from_rrd(mock_settings) + self.assertEqual(result, mock_settings) + self.__os.listdir.assert_has_calls([ + mock.call('/path/to/munin'), + mock.call('/path/to/munin/domain'), + ], any_order=True) + + # This check verifies that the mocked file structure above leads to the + # expected settings + expected = { + 'rrd_found': True, + 'rrd_filename': '/path/to/munin/domain/host-plugin-field-d.rrd', + 'xml_filename': '/path/to/xml/domain/host-plugin-field-d.xml', + 'settings': {'type': 'DERIVE'}, + } + 
field_result = { + 'rrd_found': plugin.fields['field'].rrd_found, + 'rrd_filename': plugin.fields['field'].rrd_filename, + 'xml_filename': plugin.fields['field'].xml_filename, + 'settings': plugin.fields['field'].settings, + } + self.assertEqual(field_result, expected) + + def test_check_rrd_files(self): + field1 = mock.MagicMock(name='field1', rrd_filename='rrdfile1.rrd', + xml_filename='xmlfile1') + field2 = mock.MagicMock(name='field2', rrd_filename='rrdfile2.rrd', + xml_filename='xmlfile2') + mock_settings = mock.MagicMock( + nb_rrd_files=0, + paths={ + 'xml': '/path/to/xml' + }, + domains={ + 'domain': mock.MagicMock( + hosts={ + 'host': mock.MagicMock( + plugins={ + 'plugin': mock.MagicMock( + fields={ + 'field1': field1, + 'field2': field2, + } + ) + } + ) + } + ) + } + ) + mock_settings.iter_fields.return_value = [ + ('domain', 'host', 'plugin', 'field1'), + ('domain', 'host', 'plugin', 'field2'), + ] + + rrd.check_rrd_files(mock_settings) + + self.assertEqual(mock_settings.nb_rrd_files, 2) From 0ffe3d88ae8ff4ea29a407a668f309f10c44a0e1 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Tue, 11 Apr 2017 18:16:45 +0200 Subject: [PATCH 24/41] Add logging to unhide some tracebacks. --- munininfluxdb/influxdbclient.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/munininfluxdb/influxdbclient.py b/munininfluxdb/influxdbclient.py index e98b7a3..9339f8b 100644 --- a/munininfluxdb/influxdbclient.py +++ b/munininfluxdb/influxdbclient.py @@ -1,6 +1,7 @@ from __future__ import print_function import os import json +import logging from collections import defaultdict from pprint import pprint @@ -17,6 +18,9 @@ from rrd import read_xml_file from settings import Settings + +LOG = logging.getLogger(__name__) + class InfluxdbClient: def __init__(self, settings): self.client = None @@ -39,6 +43,7 @@ def connect(self, silent=False): if not silent: print(" {0} Could not connect to database: {1}".format(Symbol.WARN_YELLOW, e)) except Exception as e: + LOG.debug(str(e), exc_info=True) print("Error: %s" % e) self.client, self.valid = None, False else: From 7b425a41db84d450adc0d3b377ed9ed228eee186 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Tue, 11 Apr 2017 18:17:13 +0200 Subject: [PATCH 25/41] Add TODO markers. --- munininfluxdb/rrd.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/munininfluxdb/rrd.py b/munininfluxdb/rrd.py index e48dd26..a2e08df 100644 --- a/munininfluxdb/rrd.py +++ b/munininfluxdb/rrd.py @@ -91,12 +91,14 @@ def export_to_xml_in_folder(source, destination=Defaults.MUNIN_XML_FOLDER): """ Converts all *.rrd files in source folder """ assert os.path.exists(source) + # TODO This try/except should be replaced with "if os.path.exists()" try: os.makedirs(destination) except OSError as e: if e.errno != errno.EEXIST: raise + # TODO Domain is always empty? This leads to empty folder names in the "join" calls below! Does this function actually work? filelist = [("", os.path.join(source, file)) for file in os.listdir(source) if file.endswith(".rrd")] nb_files = len(filelist) progress_bar = ProgressBar(nb_files) From 1130b4747fbb2ab56d1b0ae9e4616652dbbd9eac Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Tue, 11 Apr 2017 18:26:55 +0200 Subject: [PATCH 26/41] Add test for grafana panel "to_json" method.
--- munininfluxdb/test/test_grafana.py | 44 +++++++++++++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/munininfluxdb/test/test_grafana.py b/munininfluxdb/test/test_grafana.py index 654b48b..2626f39 100644 --- a/munininfluxdb/test/test_grafana.py +++ b/munininfluxdb/test/test_grafana.py @@ -221,5 +221,47 @@ def test_process_graph_types_colours(self): }) self.assertIsNone(result) + @unittest.skipUnless(mock, "unittest.mock is not available.") def test_to_json(self): - self.skipTest('TODO') + mock_settings = mock.MagicMock() + mock_settings.influxdb = { + 'database': 'the-database' + } + mock_settings.grafana = { + 'show_minmax': True, + } + # Add a dummy query so we get a value to check against below + query = mock.MagicMock() + query.to_json.return_value = {'1': '2'} + self.panel.queries = [query] + + expected = { + 'aliasColors': {}, + 'datasource': 'the-database', + 'fill': 0, + 'grid': {}, + 'leftYAxisLabel': None, + 'legend': {'alignAsTable': True, + 'avg': True, + 'current': True, + 'max': True, + 'min': True, + 'rightSide': False, + 'show': True, + 'total': False, + 'values': True}, + 'linewidth': 1, + 'seriesOverrides': [], + 'span': 6, + 'stack': False, + 'targets': [{'1': '2'}], # from the dummy query above + 'title': 'Hello', + 'tooltip': {'shared': False, 'value_type': 'individual'}, + 'type': 'graph', + 'xaxis': {'show': True}, + 'yaxes': [{'format': 'short', 'label': None, 'logBase': 1}, + {'format': 'short', 'label': None, 'logBase': 1}]} + + result = self.panel.to_json(mock_settings) + + self.assertEqual(result, expected) From aa734ae5eb1305319c9fb558b7197af7f8573321 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Tue, 11 Apr 2017 18:47:55 +0200 Subject: [PATCH 27/41] Additional grafana tests --- munininfluxdb/test/test_grafana.py | 46 ++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/munininfluxdb/test/test_grafana.py b/munininfluxdb/test/test_grafana.py index 2626f39..7a7d48e 100644 --- a/munininfluxdb/test/test_grafana.py +++ b/munininfluxdb/test/test_grafana.py @@ -265,3 +265,49 @@ def test_to_json(self): result = self.panel.to_json(mock_settings) self.assertEqual(result, expected) + + +class TestHeaderPanel(unittest.TestCase): + + def setUp(self): + self.panel = gf.HeaderPanel(title="Hello") + + def test_to_json(self): + expected = { + "title": "Hello", + "mode": "html", + "type": "text", + "editable": True, + "span": 12, + "links": [{ + "type": "absolute", + "title": "Fork me on GitHub!", + "url": "https://github.com/mvonthron/munin-influxdb", + }], + "content": "" + } + + result = self.panel.to_json(None) + + self.assertEqual(result, expected) + + +class TestRow(unittest.TestCase): + + def test_add_panel(self): + row = gf.Row('Hello') + self.assertEqual(row.panels, []) + result = row.add_panel() + self.assertEqual(len(row.panels), 1) + self.assertIsInstance(result, gf.Panel) + + def test_to_json(self): + row = gf.Row('Hello') + result = row.to_json(None) + expected = { + "title": 'Hello', + "height": '250px', + "panels": [], + "showTitle": True + } + self.assertEqual(result, expected) From 8667971277ff8c3bc0dd2c6107f2b28d380934e3 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 12 Apr 2017 08:28:39 +0200 Subject: [PATCH 28/41] Add tests for GrafanaApi --- munininfluxdb/test/test_grafana.py | 87 ++++++++++++++++++++++++++++++ 1 file changed, 87 insertions(+) diff --git a/munininfluxdb/test/test_grafana.py b/munininfluxdb/test/test_grafana.py index 7a7d48e..e6fa3e0 100644 --- 
a/munininfluxdb/test/test_grafana.py +++ b/munininfluxdb/test/test_grafana.py @@ -311,3 +311,90 @@ def test_to_json(self): "showTitle": True } self.assertEqual(result, expected) + + +@unittest.skipUnless(mock, "unittest.mock is not available.") +class TestGrafanaAPI(unittest.TestCase): + + def setUp(self): + self.requests_patcher = mock.patch('munininfluxdb.grafana.requests') + self.__requests = self.requests_patcher.start() + + def tearDown(self): + self.requests_patcher.stop() + + def test_test_host(self): + self.__requests.get.return_value = mock.MagicMock(status_code=401) + result = gf.GrafanaApi.test_host('foo') + self.assertTrue(result) + self.__requests.get.assert_called_with('foo/api/org') + + def test_test_auth(self): + self.__requests.get.return_value = mock.MagicMock(status_code=200) + result = gf.GrafanaApi.test_auth('foo', 'auth') + self.assertTrue(result) + self.__requests.get.assert_called_with('foo/api/org', auth='auth') + + def test_create_datasource(self): + mock_settings = mock.MagicMock( + grafana={ + 'auth': 'val-auth', + 'host': 'val-host', + 'access': 'val-access', + }, + influxdb={ + 'host': 'val-influxdb-host', + 'port': 1234, + 'user': 'val-influxdb-user', + 'password': 'val-influxdb-passwd', + } + ) + + expected_payload = { + "name": "dsname", + "database": "dbname", + "type": "influxdb", + "url": "http://val-influxdb-host:1234", + "user": "val-influxdb-user", + "password": "val-influxdb-passwd", + "access": "val-access", + "basicAuth": False + } + + self.__requests.post.return_value = mock.MagicMock(ok=True) + + api = gf.GrafanaApi(mock_settings) + result = api.create_datasource('dsname', 'dbname') + + self.__requests.post.assert_called_with( + 'val-host/api/datasources', + auth='val-auth', + json=expected_payload + ) + + self.assertTrue(result) + + def test_create_dashboard(self): + + mock_settings = mock.MagicMock( + grafana={ + 'auth': 'val-auth', + 'host': 'val-host', + }, + ) + + mock_response = mock.MagicMock(ok=True) + mock_response.json.return_value = {'slug': 'val-slug'} + self.__requests.post.return_value = mock_response + + request_data = {'dashboard': {'foo': 'bar'}} + + api = gf.GrafanaApi(mock_settings) + result = api.create_dashboard({'foo': 'bar'}) + + self.__requests.post.assert_called_with( + 'val-host/api/dashboards/db', + auth='val-auth', + json=request_data) + + self.assertEqual(result, 'val-host/dashboard/db/val-slug') From 38f7788233ee72582396f8a70514db004a771c5c Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 19 Apr 2017 08:02:45 +0200 Subject: [PATCH 29/41] Add setup.py extra requirements for development. --- setup.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/setup.py b/setup.py index 3f92cbf..bfc67b7 100644 --- a/setup.py +++ b/setup.py @@ -22,6 +22,10 @@ 'requests', 'storable >= 1.0.0', ], + extras_require={ + 'dev': ['pytest-cov'], + 'test': ['pytest', 'mock'], + }, packages=find_packages(), classifiers=[ 'Development Status :: 4 - Beta', From 018a253c84083f9a2055c197ccdc43834f8d7c78 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 19 Apr 2017 08:35:06 +0200 Subject: [PATCH 30/41] Omit unit-tests from coverage. 
--- .coveragerc | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..eda1347 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,4 @@ +[run] +omit = + munininfluxdb/test/* + From 816a2f22f1ab30df8755c879e390f9343e18c290 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Mon, 24 Apr 2017 08:16:01 +0200 Subject: [PATCH 31/41] Added tests for dashboard creation. --- munininfluxdb/test/test_grafana.py | 154 +++++++++++++++++++++++++++++ 1 file changed, 154 insertions(+) diff --git a/munininfluxdb/test/test_grafana.py b/munininfluxdb/test/test_grafana.py index e6fa3e0..aea659b 100644 --- a/munininfluxdb/test/test_grafana.py +++ b/munininfluxdb/test/test_grafana.py @@ -313,6 +313,160 @@ def test_to_json(self): self.assertEqual(result, expected) +class TestDashboard(unittest.TestCase): + + def setUp(self): + self.mock_settings = mock.MagicMock( + grafana={ + 'auth': 'val-auth', + 'host': 'val-host', + 'access': 'val-access', + 'title': 'DBTitle', + 'tags': ['tag1', 'tag2'], + 'graph_per_row': 5, + 'show_minmax': True, + }, + influxdb={ + 'host': 'val-influxdb-host', + 'port': 1234, + 'user': 'val-influxdb-user', + 'password': 'val-influxdb-passwd', + 'database': 'dbname', + }, + domains={ + 'domain1': mock.MagicMock( + name='mock-domain1', + hosts={ + 'host1': mock.MagicMock( + name='mock-host1', + plugins={ + 'plugin1': mock.MagicMock( + name='mock-plugin1', + settings={'graph_title': 'Plugin1'}, + fields={ + 'field1': mock.MagicMock( + name='mock-field1') + } + ), + } + ) + } + ) + } + ) + self.dash = gf.Dashboard(self.mock_settings) + + def test_generate_simple(self): + self.skipTest('Dashboard.generate_simple does not work as defined!') + + def test_prompt_setup(self): + self.skipTest('Testing interactive prompts is cumbersome. 
' + 'Skipping for now!') + + def test_add_header(self): + mock_settings = mock.MagicMock( + influxdb={'a': 1, 'b': 2} + ) + self.assertEqual(len(self.dash.rows), 0) + self.dash.add_header(mock_settings) + self.assertEqual(len(self.dash.rows), 1) + created_panels = self.dash.rows[0].panels + self.assertEqual(len(created_panels), 1) + self.assertIsInstance(created_panels[0], gf.HeaderPanel) + self.assertEqual(created_panels[0].title, + 'Welcome to your new dashboard!') + + def test_add_row(self): + self.assertEqual(len(self.dash.rows), 0) + self.dash.add_row(title="Hello World!") + self.assertEqual(len(self.dash.rows), 1) + self.assertIsInstance(self.dash.rows[0], gf.Row) + self.assertEqual(self.dash.rows[0].title, 'Hello World!') + + def test_to_json(self): + settings = mock.MagicMock() + result = self.dash.to_json(settings) + expected = { + 'id': None, + 'title': 'DBTitle', + 'tags': ['tag1', 'tag2'], + 'rows': [], + 'timezone': 'browser', + 'time': {'from': 'now-5d', 'to': 'now'}, + } + settings.assert_not_called() + self.assertEqual(result, expected) + + def test_save(self): + from io import BytesIO + fakefile = BytesIO() + with mock.patch('munininfluxdb.grafana.open') as mock_open, \ + mock.patch('munininfluxdb.grafana.json') as mock_json: + mock_open.return_value = fakefile + expected_json_content = self.dash.to_json(None) + self.dash.save('/tmp/foo.json') + mock_json.dump.assert_called_with(expected_json_content, fakefile) + + def test_upload(self): + with mock.patch('munininfluxdb.grafana.GrafanaApi') as mck_api: + self.dash.upload() + mck_api.assert_called_with(self.mock_settings) + mck_api().create_datasource.assert_called_with('dbname', 'dbname') + json_content = self.dash.to_json(None) + mck_api().create_dashboard.assert_called_with(json_content) + + def test_generate(self): + self.maxDiff = None + + self.assertEqual(self.dash.rows, []) + + self.dash.generate() + + self.assertEqual(len(self.dash.rows), 2) # Panel + 1 Plugin + row_1, row_2 = self.dash.rows + self.assertIsInstance(row_1.panels[0], gf.HeaderPanel) + + self.assertEqual(len(row_1.panels), 1) + expected = { + 'aliasColors': {}, + 'datasource': 'dbname', + 'fill': 0, + 'grid': {}, + 'leftYAxisLabel': None, + 'legend': { + 'alignAsTable': True, + 'avg': True, + 'current': True, + 'max': True, + 'min': True, + 'rightSide': False, + 'show': True, + 'total': False, + 'values': True}, + 'linewidth': 1, + 'seriesOverrides': [], + 'span': 2, + 'stack': False, + 'targets': [{ + 'alias': 'field1', + 'dsType': 'influxdb', + 'groupBy': [{'params': ['$interval'], 'type': 'time'}, + {'params': ['null'], 'type': 'fill'}], + 'measurement': 'plugin1', + 'resultFormat': 'time_series', + 'select': [[{'params': ['field1'], 'type': 'field'}, + {'params': [], 'type': 'mean'}]] + }], + 'title': 'Plugin1', + 'tooltip': {'shared': False, 'value_type': 'individual'}, + 'type': 'graph', + 'xaxis': {'show': True}, + 'yaxes': [{'format': 'short', 'label': None, 'logBase': 1}, + {'format': 'short', 'label': None, 'logBase': 1}]} + result_json = row_2.panels[0].to_json(self.mock_settings) + self.assertEqual(result_json, expected) + + @unittest.skipUnless(mock, "unittest.mock is not available.") class TestGrafanaAPI(unittest.TestCase): From 83f390a7ca1005a951bbedd4a7d2ec981f76771e Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Mon, 24 Apr 2017 08:25:29 +0200 Subject: [PATCH 32/41] Fill in test for "Dashboard.generate_simple" --- munininfluxdb/test/test_grafana.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git 
a/munininfluxdb/test/test_grafana.py b/munininfluxdb/test/test_grafana.py index aea659b..bfdb4be 100644 --- a/munininfluxdb/test/test_grafana.py +++ b/munininfluxdb/test/test_grafana.py @@ -357,7 +357,19 @@ def setUp(self): self.dash = gf.Dashboard(self.mock_settings) def test_generate_simple(self): - self.skipTest('Dashboard.generate_simple does not work as defined!') + structure = [ + {'name': 'the-name', 'fields': ['col1', 'col2']} + ] + result = gf.Dashboard.generate_simple(self.mock_settings, structure) + self.assertEqual(len(result.rows), 1) + row = result.rows[0] + self.assertEqual(len(row.panels), 1) + panel = row.panels[0] + q1, q2 = panel.queries + self.assertEqual(q1.field, 'col1') + self.assertEqual(q1.measurement, 'the-name') + self.assertEqual(q2.field, 'col2') + self.assertEqual(q2.measurement, 'the-name') def test_prompt_setup(self): self.skipTest('Testing interactive prompts is cumbersome. ' From 9f2d225cf817fadffe3b2b83b2a5d192123c036b Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Mon, 24 Apr 2017 08:25:53 +0200 Subject: [PATCH 33/41] Exclude development code from test-coverage --- munininfluxdb/grafana.py | 2 +- munininfluxdb/influxdbclient.py | 2 +- munininfluxdb/munin.py | 2 +- munininfluxdb/rfetch.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/munininfluxdb/grafana.py b/munininfluxdb/grafana.py index c666866..9efe12d 100644 --- a/munininfluxdb/grafana.py +++ b/munininfluxdb/grafana.py @@ -380,7 +380,7 @@ def create_dashboard(self, dashboardJson): r.raise_for_status() -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover # main for dev/debug purpose only dashboard = Dashboard("Munin") diff --git a/munininfluxdb/influxdbclient.py b/munininfluxdb/influxdbclient.py index 9339f8b..8fcd830 100644 --- a/munininfluxdb/influxdbclient.py +++ b/munininfluxdb/influxdbclient.py @@ -380,7 +380,7 @@ def get_settings(self): return self.settings -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover # main used for dev/debug purpose only, use "import" class MockSettings: influxdb = parse_handle("root@192.168.1.100:8086/db/munin") diff --git a/munininfluxdb/munin.py b/munininfluxdb/munin.py index 5c0663c..e13748f 100644 --- a/munininfluxdb/munin.py +++ b/munininfluxdb/munin.py @@ -190,7 +190,7 @@ def read_state_file(filename): print("{0} Error: could read state file {1}: {2}".format(Symbol.NOK_RED, filename, e)) -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover # main() for dev/debug only settings = discover_from_datafile("../data/datafile") # acadis.org;tesla:if_eth0.up.info diff --git a/munininfluxdb/rfetch.py b/munininfluxdb/rfetch.py index bd793fd..66212e7 100644 --- a/munininfluxdb/rfetch.py +++ b/munininfluxdb/rfetch.py @@ -61,5 +61,5 @@ def main(): pprint(content) -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover main() From 7480077adbcd124176f20ce2a1db1adf5c7e6340 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Tue, 25 Apr 2017 07:56:28 +0200 Subject: [PATCH 34/41] Add test for munin.cleanup. --- munininfluxdb/test/test_munin.py | 39 ++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/munininfluxdb/test/test_munin.py b/munininfluxdb/test/test_munin.py index dff7fd0..b2b48e9 100644 --- a/munininfluxdb/test/test_munin.py +++ b/munininfluxdb/test/test_munin.py @@ -3,11 +3,15 @@ import unittest from munininfluxdb.munin import ( + cleanup, generate_filenames, populate_settings, ) from munininfluxdb.settings import Settings +from . 
import mock + + EXAMPLE_DATA = StringIO(dedent( u'''\ version 2.0.19-3 @@ -132,3 +136,38 @@ def test_generate_filename(self): self.assertEqual(expected_xml_filenames, xml_filenames) self.assertEqual(expected_rrd_filenames, rrd_filenames) + + def test_cleanup(self): + plugins = { + 'mg_plugin1': mock.MagicMock( + fields={ + 'mg_field1': [1, 2, 3] + } + ), + 'mg_plugin2': mock.MagicMock( + fields={ + 'mg_field2': [4, 5, 6] + } + ) + } + mock_settings = mock.MagicMock( + domains={ + 'domain': mock.MagicMock( + hosts={ + 'host': mock.MagicMock( + plugins=plugins + ) + } + ) + } + ) + mock_settings.iter_fields.return_value = [ + ('domain', 'host', 'mg_plugin1.mg_field1', 'field1'), + ('domain', 'host', 'mg_plugin2.mg_field2', 'field2'), + ] + + self.assertTrue('mg_field1' in plugins['mg_plugin1'].fields) + self.assertTrue('mg_field2' in plugins['mg_plugin2'].fields) + cleanup(mock_settings) + self.assertFalse('mg_field1' in plugins['mg_plugin1'].fields) + self.assertFalse('mg_field2' in plugins['mg_plugin2'].fields) From 44a2914bc73bd3d4c4aab5e20f57e8aad596b026 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Tue, 25 Apr 2017 08:03:32 +0200 Subject: [PATCH 35/41] Remove rfetch.py the existing code does not do anything useful. --- munininfluxdb/rfetch.py | 65 ------------------------------- munininfluxdb/test/test_rfetch.py | 10 ----- 2 files changed, 75 deletions(-) delete mode 100644 munininfluxdb/rfetch.py delete mode 100644 munininfluxdb/test/test_rfetch.py diff --git a/munininfluxdb/rfetch.py b/munininfluxdb/rfetch.py deleted file mode 100644 index 66212e7..0000000 --- a/munininfluxdb/rfetch.py +++ /dev/null @@ -1,65 +0,0 @@ -from __future__ import print_function -from pprint import pprint - -try: - import paramiko -except ImportError: - print("SSH library Paramiko missing, needed for remote plugins") - -class MuninRunner: - def __init__(self): - pass - -class HostRunner: - def __init__(self): - pass - - -def main(): - content = {} - - with open("../data/munin-conf/munin.conf") as f: - current_group = {} - current_group_name = "_top_level_" - - for line in f.readlines(): - line = line.strip() - # comment - if line.startswith('#') or not line: - pass - - # group - elif line.startswith('['): - # save old group - content[current_group_name] = current_group - - # init new one - line = line.strip('[]') - splitted = line.split(';') - if len(splitted) == 1: - domain_name = ".".join(line.split('.')[-2:]) - host_name = line - else: - domain_name, host_name = splitted - - current_group = { - 'host': host_name, - 'domain': domain_name - } - current_group_name = line - - # entry - else: - elements = line.split() - if len(elements) > 2: - current_group[elements[0]] = elements[1:] - else: - current_group[elements[0]] = elements[1] - - # save last one - content[current_group_name] = current_group - - pprint(content) - -if __name__ == "__main__": # pragma: no cover - main() diff --git a/munininfluxdb/test/test_rfetch.py b/munininfluxdb/test/test_rfetch.py deleted file mode 100644 index 1a02689..0000000 --- a/munininfluxdb/test/test_rfetch.py +++ /dev/null @@ -1,10 +0,0 @@ -import unittest - -from . import mock - - -@unittest.skipUnless(mock, "unittest.mock is not available.") -class TestRFetch(unittest.TestCase): - - def test_main(self): - self.skipTest('This function does not seem to do anything useful!') From a7179761b3e0b080edf08a72385466fdf4d0ae57 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Wed, 26 Apr 2017 17:00:00 +0200 Subject: [PATCH 36/41] Add tests for "cron" and "dump" commands. 
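The cron command that these tests exercise is assumed to expose injectable factories: each factory receives the external "cron" adapter and returns the actual argparse handler, so the tests can simply pass in a mock. The following is a rough sketch of that shape, reconstructed from the test expectations below; get_cron_user, add_with_comment and remove_by_comment are names taken from the assertions, not verified against the module:

    import pwd

    from munininfluxdb.utils import absolute_executable

    CRON_COMMENT = 'Update InfluxDB with fresh values from Munin'

    def get_cron_user():
        # Prefer the "munin" user when it exists, fall back to root.
        try:
            pwd.getpwnam('munin')
        except KeyError:
            return 'root'
        return 'munin'

    def install_cron(cron_adapter):
        def handler(args):
            # Periodically re-run the "fetch" subcommand of this executable.
            command = '%s fetch' % absolute_executable()
            return cron_adapter.add_with_comment(
                args.user, command, args.period, CRON_COMMENT)
        return handler

    def uninstall_cron(cron_adapter):
        def handler(args):
            return cron_adapter.remove_by_comment(args.user, CRON_COMMENT)
        return handler

Passing the adapter in rather than importing crontab at module level keeps the handlers free of global state, which is what makes the injections dictionary in test_setup() possible.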
--- munininfluxdb/test/commands/__init__.py | 0 munininfluxdb/test/commands/test_cron.py | 72 ++++++++++++++++++++++++ munininfluxdb/test/commands/test_dump.py | 67 ++++++++++++++++++++++ 3 files changed, 139 insertions(+) create mode 100644 munininfluxdb/test/commands/__init__.py create mode 100644 munininfluxdb/test/commands/test_cron.py create mode 100644 munininfluxdb/test/commands/test_dump.py diff --git a/munininfluxdb/test/commands/__init__.py b/munininfluxdb/test/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/munininfluxdb/test/commands/test_cron.py b/munininfluxdb/test/commands/test_cron.py new file mode 100644 index 0000000..de2658e --- /dev/null +++ b/munininfluxdb/test/commands/test_cron.py @@ -0,0 +1,72 @@ +import unittest + +from .. import mock + +import munininfluxdb.commands.cron as m_cron + + +class TestCron(unittest.TestCase): + + def setUp(self): + self.pwd_patcher = mock.patch('munininfluxdb.commands.cron.pwd') + self.os_patcher = mock.patch('munininfluxdb.commands.cron.os') + self.sys_patcher = mock.patch('munininfluxdb.commands.cron.sys') + self.print_patcher = mock.patch('munininfluxdb.commands.cron.print') + + self.__pwd = self.pwd_patcher.start() + self.__os = self.os_patcher.start() + self.__sys = self.sys_patcher.start() + self.__print = self.print_patcher.start() + + def tearDown(self): + self.pwd_patcher.stop() + self.os_patcher.stop() + self.sys_patcher.stop() + self.print_patcher.stop() + + def test_get_cron_user_munin(self): + result = m_cron.get_cron_user() + self.assertEqual(result, 'munin') + + def test_get_cron_user_root(self): + self.__pwd.getpwnam.side_effect = KeyError('token exception') + result = m_cron.get_cron_user() + self.assertEqual(result, 'root') + + def test_uninstall_cron(self): + mock_cron = mock.MagicMock() + fun = m_cron.uninstall_cron(mock_cron) + args = mock.MagicMock( + user='munin' + ) + fun(args) + mock_cron.remove_by_comment.assert_called_with( + 'munin', m_cron.CRON_COMMENT) + + def test_install_cron(self): + mock_cron = mock.MagicMock() + fun = m_cron.install_cron(mock_cron) + args = mock.MagicMock( + user='munin', + period='5' + ) + with mock.patch('munininfluxdb.commands.cron.absolute_executable') as p: + p.return_value = '/foo/bar' + fun(args) + mock_cron.add_with_comment.assert_called_with( + 'munin', '/foo/bar fetch', '5', m_cron.CRON_COMMENT) + + def test_setup(self): + from argparse import ArgumentParser + parser = ArgumentParser() + injections = { + 'cron': mock.MagicMock() + } + m_cron.setup(parser, injections) + + result = parser.parse_args('-u jdoe install -p 10'.split()) + self.assertEqual(result.period, 10) + self.assertEqual(result.user, 'jdoe') + + result2 = parser.parse_args('-u jdoe uninstall'.split()) + self.assertEqual(result2.user, 'jdoe') diff --git a/munininfluxdb/test/commands/test_dump.py b/munininfluxdb/test/commands/test_dump.py new file mode 100644 index 0000000..fa7f293 --- /dev/null +++ b/munininfluxdb/test/commands/test_dump.py @@ -0,0 +1,67 @@ +import unittest +from pprint import pprint + +from .. 
import mock + +import munininfluxdb.commands.dump as m_dump + + +class TestDump(unittest.TestCase): + + def setUp(self): + self.patchers = set() + self.mocks = {} + for name in 'print munin rrd Settings Defaults Symbol'.split(): + patcher = mock.patch('munininfluxdb.commands.dump.%s' % name) + self.patchers.add(patcher) + self.mocks[name] = patcher.start() + + def tearDown(self): + for name, mock_ in self.mocks.items(): + print(name.center(40, '-')) + pprint(mock_.mock_calls) + for patcher in self.patchers: + patcher.stop() + + def test_retrieve_munin_configuration(self): + mock_settings = mock.MagicMock(name='mock-settings', + paths={'datafile': 'the-datafile'}) + self.mocks['munin'].discover_from_datafile.return_value = mock_settings + + m_dump.retrieve_munin_configuration(mock_settings) + + self.mocks['munin'].discover_from_datafile.assert_called_with( + mock_settings) + self.mocks['rrd'].check_rrd_files.assert_called_with(mock_settings) + + def test_main(self): + token = object() + mock_settings = mock.MagicMock() + self.mocks['Settings'].return_value = mock_settings + self.mocks['munin'].discover_from_datafile.return_value = mock_settings + + m_dump.main(token) + + self.mocks['Settings'].assert_called_with(token) + self.mocks['munin'].discover_from_datafile.assert_called_with( + mock_settings) + self.mocks['rrd'].export_to_xml.assert_called_with(mock_settings) + + def test_setup(self): + from argparse import ArgumentParser + + self.mocks['Defaults'].MUNIN_VAR_FOLDER = 'mvf' + self.mocks['Defaults'].MUNIN_RRD_FOLDER = 'mrf' + self.mocks['Defaults'].MUNIN_WWW_FOLDER = 'mwf' + self.mocks['Defaults'].MUNIN_XML_FOLDER = 'mxf' + + parser = ArgumentParser() + m_dump.setup(parser, {}) + + result = parser.parse_args(''.split()) + self.assertEqual(result.xml_temp_path, 'mxf') + self.assertEqual(result.keep_temp, False) + self.assertEqual(result.verbose, 1) + self.assertEqual(result.munin_path, 'mvf') + self.assertEqual(result.www, 'mwf') + self.assertEqual(result.rrd, 'mrf') From 7b797d3de98964d92a68379b1d7bc1d2c67b3586 Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Fri, 28 Apr 2017 08:09:14 +0200 Subject: [PATCH 37/41] Add unit-tests for the "fetch" command. --- munininfluxdb/test/commands/test_fetch.py | 113 ++++++++++++++++++++++ 1 file changed, 113 insertions(+) create mode 100644 munininfluxdb/test/commands/test_fetch.py diff --git a/munininfluxdb/test/commands/test_fetch.py b/munininfluxdb/test/commands/test_fetch.py new file mode 100644 index 0000000..65042c7 --- /dev/null +++ b/munininfluxdb/test/commands/test_fetch.py @@ -0,0 +1,113 @@ +import unittest +from pprint import pprint + +from .. 
import mock + +import munininfluxdb.commands.fetch as m_fetch + + +class TestFetch(unittest.TestCase): + + def setUp(self): + self.patchers = set() + self.mocks = {} + + for name in 'print Defaults influxdb storable'.split(): + patcher = mock.patch('munininfluxdb.commands.fetch.%s' % name) + self.patchers.add(patcher) + self.mocks[name] = patcher.start() + + def tearDown(self): + for name, mock_ in self.mocks.items(): + print(name.center(40, '-')) + pprint(mock_.mock_calls) + for patcher in self.patchers: + patcher.stop() + + def test_pack_values(self): + config = { + 'metrics': { + 'metric_a': ('measurement_a', 'field_a'), + }, + 'tags': { + 'measurement_a': ['tag_a'] + }, + } + metric_a = mock.MagicMock() + metric_a.values.return_value = ((123, 10), (234, 11)) + metric_b = mock.MagicMock() + metric_b.values.return_value = ((345, 13), (456, 8)) + values = ( + { + 'metric_a': metric_a, + 'metric_b': metric_b, + }, + 12345 # unix timestamp + ) + result = m_fetch.pack_values(config, values) + + expected = [ + {'fields': {'field_a': 10.0}, + 'measurement': 'measurement_a', + 'tags': ['tag_a'], + 'time': 123} + ] + self.assertEqual(result, expected) + + def test_read_state_file(self): + self.mocks['storable'].retrieve.return_value = { + 'spoolfetch': 'spoolfetch-value', + 'value': 'myvalue' + } + result = m_fetch.read_state_file('my-state-file') + expected = ('myvalue', 'spoolfetch-value') + self.assertEqual(result, expected) + + def test_main(self): + mocked_args = mock.MagicMock(name='mock-args') + with mock.patch('munininfluxdb.commands.fetch.open', create=True), \ + mock.patch('munininfluxdb.commands.fetch.json') as m_json, \ + mock.patch('munininfluxdb.commands.fetch.pack_values') as m_pd, \ + mock.patch('munininfluxdb.commands.fetch.read_state_file'): + m_pd.return_value = [1, 2, 3] # just a dummy value + m_json.load.return_value = { + 'influxdb': { + 'host': 'influxdb.host', + 'user': 'influxdb.user', + 'port': 'influxdb.port', + 'password': 'influxdb.password', + 'database': 'influxdb.database', + }, + 'statefiles': ['statefile-1'], + 'lastupdate': 'lastupdate-value' + } + result = m_fetch.main(mocked_args) + + # --- Verify calls ------------------ + self.mocks['influxdb'].InfluxDBClient.assert_called_with( + 'influxdb.host', 'influxdb.port', 'influxdb.user', + 'influxdb.password', + ) + + idbclient = self.mocks['influxdb'].InfluxDBClient() + idbclient.get_list_database.assert_called_with() + idbclient.switch_database.assert_called_with('influxdb.database') + idbclient.write_points.assert_called_with( + m_pd.return_value, + time_precision='s' + ) + self.assertIsNone(result) + + def test_setup(self): + from argparse import ArgumentParser + + self.mocks['Defaults'].FETCH_CONFIG = 'foo' + + parser = ArgumentParser() + m_fetch.setup(parser, {}) + + result = parser.parse_args(''.split()) + self.assertEqual(result.config, 'foo') + + result = parser.parse_args('--config bar'.split()) + self.assertEqual(result.config, 'bar') From a4de0e60cdb6dfdd76efb6c1781eda48433b829a Mon Sep 17 00:00:00 2001 From: Michel Albert Date: Sun, 30 Apr 2017 12:38:07 +0200 Subject: [PATCH 38/41] Add tests for the "import" command. 
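For orientation, the control flow of the import command that these tests pin down looks roughly as follows. This is a simplified reconstruction from the assertions below, not the verbatim module; the munin discovery and RRD-to-XML steps are elided because both modules are patched out in the test:

    from munininfluxdb.settings import Settings
    from munininfluxdb.influxdbclient import InfluxdbClient
    from munininfluxdb.grafana import Dashboard
    from munininfluxdb.utils import ask_password

    def main(args):
        settings = Settings(args)
        # ... munin discovery and RRD-to-XML export happen here ...

        exporter = InfluxdbClient(settings)
        if not settings.influxdb['password']:
            settings.influxdb['password'] = ask_password()
        exporter.connect()
        exporter.test_db(settings.influxdb['database'])
        exporter.import_from_xml()

        # The exporter may have updated the connection settings interactively.
        settings = exporter.get_settings()
        settings.save_fetch_config()

        if settings.grafana['create']:
            dashboard = Dashboard(settings)
            dashboard.generate()
            dashboard.upload()
            dashboard.save()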
From a4de0e60cdb6dfdd76efb6c1781eda48433b829a Mon Sep 17 00:00:00 2001
From: Michel Albert
Date: Sun, 30 Apr 2017 12:38:07 +0200
Subject: [PATCH 38/41] Add tests for the "import" command.

---
 munininfluxdb/test/commands/test_import.py | 107 +++++++++++++++++++++
 1 file changed, 107 insertions(+)
 create mode 100644 munininfluxdb/test/commands/test_import.py

diff --git a/munininfluxdb/test/commands/test_import.py b/munininfluxdb/test/commands/test_import.py
new file mode 100644
index 0000000..d49bdcc
--- /dev/null
+++ b/munininfluxdb/test/commands/test_import.py
@@ -0,0 +1,107 @@
+import unittest
+from pprint import pprint
+
+from .. import mock
+
+import munininfluxdb.commands.import_ as m_import
+
+
+class TestImport(unittest.TestCase):
+
+    def setUp(self):
+        self.patchers = set()
+        self.mocks = {}
+
+        for name in 'print munin rrd Settings InfluxdbClient Dashboard prompt ask_password raw_input'.split():
+            patcher = mock.patch('munininfluxdb.commands.import_.%s' % name)
+            self.patchers.add(patcher)
+            self.mocks[name] = patcher.start()
+
+    def tearDown(self):
+        for name, mock_ in self.mocks.items():
+            print(name.center(40, '-'))
+            pprint(mock_.mock_calls)
+        for patcher in self.patchers:
+            patcher.stop()
+
+    def test_main(self):
+        args = mock.MagicMock(name='cli-args')
+
+        # --- prime mocks -----------------------
+        settings = mock.MagicMock(
+            name='settings',
+            paths={
+                'xml': 'xmlpath',
+                'fetch_config': 'fcpath',
+            },
+            interactive=False,
+            influxdb={
+                'password': 'idbpwd',
+                'database': 'idbdb',
+                'host': 'idbhost',
+                'group_fields': True,
+            },
+            grafana={
+                'host': 'grafanahost',
+                'create': True,
+                'filename': 'grafanafile',
+            }
+        )
+        dashboard = mock.MagicMock(name='dashboard')
+        self.mocks['Dashboard'].return_value = dashboard
+        self.mocks['Settings'].return_value = settings
+        self.mocks['rrd'].export_to_xml.return_value = 10
+        exporter = mock.MagicMock(name='InfluxdbClient', settings=settings)
+        exporter.get_settings.return_value = settings
+        self.mocks['InfluxdbClient'].return_value = exporter
+
+        # --- make the call ---------------------
+        m_import.main(args)
+
+        # --- verify calls ----------------------
+        exporter.connect.assert_called()
+        exporter.test_db.assert_called_with('idbdb')
+        exporter.import_from_xml.assert_called()
+        exporter.get_settings.assert_called()
+
+        settings.save_fetch_config.assert_called()
+
+        dashboard.generate.assert_called()
+        dashboard.upload.assert_called()
+        dashboard.save.assert_called()
+
+    def test_setup(self):
+        from argparse import ArgumentParser
+
+        parser = ArgumentParser()
+        m_import.setup(parser, {})
+
+        # Default values
+        result = parser.parse_args(''.split())
+
+        self.assertEqual(result.grafana_cols, 2)
+        self.assertEqual(result.grafana_file,
+                         '/tmp/munin-influxdb/munin-grafana.json')
+        self.assertEqual(result.grafana_title, 'Munin Dashboard')
+        self.assertEqual(result.influxdb, 'root@localhost:8086/db/munin')
+        self.assertEqual(result.munin_path, '/var/lib/munin')
+        self.assertEqual(result.rrd, '/var/lib/munin')
+        self.assertEqual(result.verbose, 1)
+        self.assertEqual(result.www, '/var/cache/munin/www')
+        self.assertEqual(result.xml_temp_path, '/tmp/munin-influxdb/xml')
+        self.assertFalse(result.keep_temp)
+
+        self.assertIsNone(result.grafana_tags)
+
+        self.assertEqual(result.rrd, '/var/lib/munin')
+        self.assertEqual(result.www, '/var/cache/munin/www')
+        self.assertTrue(result.fetch_config_path.endswith(
+            '.config/munin-fetch-config.json'))
+        self.assertTrue(result.grafana)
+        self.assertTrue(result.group_fields)
+        self.assertTrue(result.show_minmax)
+
+        # The above also checks that the values appear properly in the resulting
+        # namespace.
+        # TODO We should also test passing in other values and verify that they
+        # are properly checked. I'll skip this for now in order to advance.
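
test_main above primes one MagicMock with nested data (paths, influxdb,
grafana, ...) purely through constructor keyword arguments. A small
illustration of that mechanism, with arbitrary attribute names that are not
taken from this repository:

    from unittest import mock

    settings = mock.MagicMock(
        name='settings',            # 'name' is special: it labels the mock
        paths={'xml': 'xmlpath'},   # other kwargs become plain attributes
        interactive=False,
    )
    assert settings.paths['xml'] == 'xmlpath'
    assert settings.interactive is False

    # Unconfigured attributes still auto-create child mocks, which is what
    # lets the test assert on calls it never set up explicitly:
    settings.save_fetch_config()
    settings.save_fetch_config.assert_called()
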
From 7bce6455697885043b03c5992df8d4461f97fa8e Mon Sep 17 00:00:00 2001
From: Michel Albert
Date: Sun, 30 Apr 2017 12:43:30 +0200
Subject: [PATCH 39/41] Prepare main module for unit-testing.

---
 munininfluxdb/main.py | 44 +++++++++++++++++++++++++++++++++++++------
 1 file changed, 38 insertions(+), 6 deletions(-)

diff --git a/munininfluxdb/main.py b/munininfluxdb/main.py
index 53c428a..bdb719b 100644
--- a/munininfluxdb/main.py
+++ b/munininfluxdb/main.py
@@ -10,11 +10,40 @@
 import munininfluxdb.external.cron as cron
 
 
-def main():
+def dummy_function(args):
+    """
+    Dummy function which is called if no subcommand was loaded.
+
+    Developer Note:
+
+    This exists mainly so that we can mock it during unit tests and
+    check that the subcommand function is called properly without
+    actually calling any subcommand.
+    """
+    print('Dummy function. If you see this, no subcommand was loaded. '
+          'It is HIGHLY unlikely that you see this! '
+          'If you do, please contact the developers!')
+    return 0
+
+
+def main(args=None, commands=None):
+    # Allow unit-test injections of arguments and the command table.
+    args = args or sys.argv[1:]
+    if not commands:
+        commands = {
+            'cron': cmd_cron,
+            'dump': cmd_dump,
+            'fetch': cmd_fetch,
+            'import': cmd_import,
+        }
+
+    # Prepare the CLI parser
     parser = ArgumentParser(description='TODO')  # TODO
     parser.add_argument('--interactive', dest='interactive', action='store_true')
     parser.add_argument('--no-interactive', dest='interactive', action='store_false')
     parser.set_defaults(interactive=True)
+    parser.set_defaults(func=dummy_function)
 
     subparsers = parser.add_subparsers(
         title='subcommands',
@@ -22,20 +51,23 @@
         help='additional help'
     )
 
-    for subcommand in (cmd_import, cmd_fetch, cmd_cron, cmd_dump):
+    # Initialise CLI argument parser for the subcommands
+    for subcommand in commands.values():
         subparser = subparsers.add_parser(subcommand.NAME,
                                           description=subcommand.DESCRIPTION)
         subcommand.setup(subparser, {
             'cron': cron
         })
 
-    args = parser.parse_args()
+    # Parse the arguments and execute the command
+    namespace = parser.parse_args(args)
     try:
-        args.func(args)
+        namespace.func(namespace)
     except KeyboardInterrupt:
         print("\n{0} Canceled.".format(Symbol.NOK_RED))
-        sys.exit(1)
+        return 1
     except Exception as e:
         raise
         print("{0} Error: {1}".format(Symbol.NOK_RED, e))
-        sys.exit(1)
+        return 1
+    return 0
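
The main(args=None, commands=None) signature introduced above is what makes
the entry point testable: callers can inject an argument list and a command
table instead of relying on sys.argv and the hard-wired imports. One
subtlety, sketched below as a standalone illustration (not a patch to this
repository): "args or sys.argv[1:]" also falls back to the real command
line when a caller passes an *empty* list, because an empty list is falsy;
an explicit None check would preserve it.

    import sys

    def pick_args_as_patched(args):
        # Behaviour as written in the patch above.
        return args or sys.argv[1:]

    def pick_args_none_check(args):
        # Stricter alternative: fall back only when nothing was passed.
        return args if args is not None else sys.argv[1:]

    assert pick_args_as_patched(['import']) == ['import']
    assert pick_args_none_check([]) == []
    # pick_args_as_patched([]) would silently read sys.argv instead.
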
From b904dcf10e2d160b70c808989ca3d5df20c66c17 Mon Sep 17 00:00:00 2001
From: Michel Albert
Date: Sun, 30 Apr 2017 13:06:27 +0200
Subject: [PATCH 40/41] Add unit-test for main entry-point.

---
 munininfluxdb/test/test_main.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)
 create mode 100644 munininfluxdb/test/test_main.py

diff --git a/munininfluxdb/test/test_main.py b/munininfluxdb/test/test_main.py
new file mode 100644
index 0000000..2b49003
--- /dev/null
+++ b/munininfluxdb/test/test_main.py
@@ -0,0 +1,20 @@
+import unittest
+
+from munininfluxdb.main import main
+
+from . import mock
+
+
+class TestMain(unittest.TestCase):
+
+    def test_main(self):
+        args = 'import'.split()
+        mock_cmd = mock.MagicMock(NAME='import')
+        mocked_commands = {
+            'import': mock_cmd
+        }
+        with mock.patch('munininfluxdb.main.dummy_function') as fun:
+            result = main(args, mocked_commands)
+        mock_cmd.setup.assert_called()
+        fun.assert_called()
+        self.assertEqual(result, 0)

From 5d9855f5c214e108a1522d5da2a83b5fca9d9e42 Mon Sep 17 00:00:00 2001
From: Michel Albert
Date: Sun, 30 Apr 2017 13:09:11 +0200
Subject: [PATCH 41/41] Remove "raise" which caused non-execution of code

---
 munininfluxdb/main.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/munininfluxdb/main.py b/munininfluxdb/main.py
index bdb719b..4c78780 100644
--- a/munininfluxdb/main.py
+++ b/munininfluxdb/main.py
@@ -67,7 +67,6 @@
         print("\n{0} Canceled.".format(Symbol.NOK_RED))
         return 1
     except Exception as e:
-        raise
         print("{0} Error: {1}".format(Symbol.NOK_RED, e))
         return 1
     return 0
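
For context on this last patch: a bare "raise" inside an except block
re-raises the active exception immediately, so everything after it in the
handler is dead code. The error message and the "return 1" introduced in
PATCH 39 could therefore never run until the "raise" was removed. A minimal
reproduction, with illustrative names only:

    def handler():
        try:
            1 / 0
        except Exception as e:
            raise                   # re-raises ZeroDivisionError here...
            print("Error:", e)      # ...so these two lines are unreachable
            return 1
        return 0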