diff --git a/divvy/__init__.py b/divvy/__init__.py index 7201923..e6e86fb 100644 --- a/divvy/__init__.py +++ b/divvy/__init__.py @@ -8,9 +8,6 @@ """ import logging -import os -from sys import stdout - from ._version import __version__ from .compute import ComputingConfiguration from .const import * @@ -19,101 +16,8 @@ __classes__ = ["ComputingConfiguration"] __all__ = __classes__ + [write_submit_script.__name__] -LOGGING_LEVEL = "INFO" -LOGGING_LOCATIONS = (stdout, ) - -# Default user logging format is simple -DEFAULT_LOGGING_FMT = "%(message)s" -# Developer logger format is more information-rich -DEV_LOGGING_FMT = "%(module)s:%(lineno)d (%(funcName)s) [%(levelname)s] > %(message)s " - - # Ensure that we have a handler and don't get a logging exception. # Note that this was originally with looper.models. _LOGGER = logging.getLogger(__name__) if not logging.getLogger().handlers: _LOGGER.addHandler(logging.NullHandler()) - - -def setup_divvy_logger(level, additional_locations=None, devmode=False): - """ - Establish project logger.. - - :param int | str level: logging level - :param tuple(str | FileIO[str]) additional_locations: supplementary - destination(s) to which to ship logs - :param bool devmode: whether to use developer logging config - :return logging.Logger: project-root logger - """ - - logging.addLevelName(5, "VERY_FINE") - - fmt = DEV_LOGGING_FMT if devmode else DEFAULT_LOGGING_FMT - - # Establish the logger. - logger = logging.getLogger("divvy") - # First remove any previously-added handlers - logger.handlers = [] - logger.propagate = False - - # Handle int- or text-specific logging level. - try: - level = int(level) - except ValueError: - level = level.upper() - - try: - logger.setLevel(level) - except Exception: - logging.error("Can't set logging level to %s; instead using: '%s'", - str(level), str(LOGGING_LEVEL)) - level = LOGGING_LEVEL - logger.setLevel(level) - - # Process any additional locations. - locations_exception = None - where = LOGGING_LOCATIONS - if additional_locations: - if isinstance(additional_locations, str): - additional_locations = (additional_locations, ) - try: - where = LOGGING_LOCATIONS + tuple(additional_locations) - except TypeError as e: - locations_exception = e - if locations_exception: - logging.warn("Could not interpret {} as supplementary root logger " - "target destinations; using {} as root logger location(s)". - format(additional_locations, LOGGING_LOCATIONS)) - - # Add the handlers. 
- formatter = logging.Formatter(fmt=(fmt or DEFAULT_LOGGING_FMT)) - for loc in where: - if isinstance(loc, str): - # File destination - dirpath = os.path.abspath(os.path.dirname(loc)) - if not os.path.exists(dirpath): - os.makedirs(dirpath) - handler_type = logging.FileHandler - elif hasattr(loc, "write"): - # Stream destination - handler_type = logging.StreamHandler - else: - # Strange supplementary destination - logging.info("{} as logs destination appears to be neither " - "a filepath nor a stream.".format(loc)) - continue - - if handler_type is logging.FileHandler: - handler = handler_type(loc, mode='w') - else: - handler = handler_type(loc) - - handler.setLevel(level) - handler.setFormatter(formatter) - logger.addHandler(handler) - - return logger - - -# Default -setup_divvy_logger("INFO") diff --git a/divvy/_version.py b/divvy/_version.py index e9ba10a..e19434e 100644 --- a/divvy/_version.py +++ b/divvy/_version.py @@ -1,2 +1 @@ -__version__ = "0.3.2" - +__version__ = "0.3.3" diff --git a/divvy/compute.py b/divvy/compute.py index b321c1d..6a36f80 100644 --- a/divvy/compute.py +++ b/divvy/compute.py @@ -301,18 +301,12 @@ def main(): subparsers = parser.add_subparsers(dest="command") - - def add_subparser(cmd): - # Individual subcommands - msg_by_cmd = { - "list": "List available compute packages", - "write": "Write a submit script" - } + def add_subparser(cmd, description): return subparsers.add_parser( - cmd, description=msg_by_cmd[cmd], help=msg_by_cmd[cmd]) + cmd, description=description, help=description) - write_subparser = add_subparser("write") - list_subparser = add_subparser("list") + write_subparser = add_subparser("write", "Write a submit script") + list_subparser = add_subparser("list", "List available compute packages") write_subparser.add_argument( "-S", "--settings", diff --git a/divvy/utils.py b/divvy/utils.py index 6843371..2d446e1 100644 --- a/divvy/utils.py +++ b/divvy/utils.py @@ -1,17 +1,8 @@ """ Helpers without an obvious logical home. """ -from collections import defaultdict, Iterable -import contextlib import logging import os -import random import re -import string -import sys -if sys.version_info < (3, 0): - from urlparse import urlparse -else: - from urllib.parse import urlparse import warnings import yaml from .const import NEW_COMPUTE_KEY, OLD_COMPUTE_KEY @@ -51,54 +42,6 @@ def copy(self): return obj -def import_from_source(module_filepath): - """ - Import a module from a particular filesystem location. - - :param str module_filepath: path to the file that constitutes the module - to import - :return module: module imported from the given location, named as indicated - :raises ValueError: if path provided does not point to an extant file - """ - import sys - - if not os.path.exists(module_filepath): - raise ValueError("Path to alleged module file doesn't point to an " - "extant file: '{}'".format(module_filepath)) - - # Randomly generate module name. - fname_chars = string.ascii_letters + string.digits - name = "".join(random.choice(fname_chars) for _ in range(20)) - - # Import logic is version-dependent. 
- if sys.version_info >= (3, 5): - from importlib import util as _il_util - modspec = _il_util.spec_from_file_location( - name, module_filepath) - mod = _il_util.module_from_spec(modspec) - modspec.loader.exec_module(mod) - elif sys.version_info < (3, 3): - import imp - mod = imp.load_source(name, module_filepath) - else: - # 3.3 or 3.4 - from importlib import machinery as _il_mach - loader = _il_mach.SourceFileLoader(name, module_filepath) - mod = loader.load_module() - - return mod - - -def is_url(maybe_url): - """ - Determine whether a path is a URL. - - :param str maybe_url: path to investigate as URL - :return bool: whether path appears to be a URL - """ - return urlparse(maybe_url).scheme != "" - - def parse_config_file(conf_file): """ Parse a divvy configuration file. @@ -121,46 +64,6 @@ def parse_config_file(conf_file): return env_settings -def parse_text_data(lines_or_path, delimiter=os.linesep): - """ - Interpret input argument as lines of data. This is intended to support - multiple input argument types to core model constructors. - - :param str | collections.Iterable lines_or_path: - :param str delimiter: line separator used when parsing a raw string that's - not a file - :return collections.Iterable: lines of text data - :raises ValueError: if primary data argument is neither a string nor - another iterable - """ - - if os.path.isfile(lines_or_path): - with open(lines_or_path, 'r') as f: - return f.readlines() - else: - _LOGGER.debug("Not a file: '{}'".format(lines_or_path)) - - if isinstance(lines_or_path, str): - return lines_or_path.split(delimiter) - elif isinstance(lines_or_path, Iterable): - return lines_or_path - else: - raise ValueError("Unable to parse as data lines {} ({})". - format(lines_or_path, type(lines_or_path))) - - -def sample_folder(prj, sample): - """ - Get the path to this Project's root folder for the given Sample. - - :param PathExAttMap | Project prj: project with which sample is associated - :param Mapping sample: Sample or sample data for which to get root output - folder path. - :return str: this Project's root folder for the given Sample - """ - return os.path.join(prj.metadata.results_subdir, - sample["sample_name"]) - def write_submit_script(fp, content, data): """ @@ -191,170 +94,6 @@ def write_submit_script(fp, content, data): return fp -@contextlib.contextmanager -def standard_stream_redirector(stream): - """ - Temporarily redirect stdout and stderr to another stream. - - This can be useful for capturing messages for easier inspection, or - for rerouting and essentially ignoring them, with the destination as - something like an opened os.devnull. - - :param FileIO[str] stream: temporary proxy for standard streams - """ - import sys - genuine_stdout, genuine_stderr = sys.stdout, sys.stderr - sys.stdout, sys.stderr = stream, stream - try: - yield - finally: - sys.stdout, sys.stderr = genuine_stdout, genuine_stderr - - -def warn_derived_cols(): - _warn_cols_to_attrs("derived") - - -def warn_implied_cols(): - _warn_cols_to_attrs("implied") - - -def _warn_cols_to_attrs(prefix): - warnings.warn("{pfx}_columns should be encoded and referenced " - "as {pfx}_attributes".format(pfx=prefix), DeprecationWarning) - - -class CommandChecker(object): - """ - Validate PATH availability of executables referenced by a config file. 
- - :param str path_conf_file: path to configuration file with - sections detailing executable tools to validate - :param Iterable[str] sections_to_check: names of - sections of the given configuration file that are relevant; - optional, will default to all sections if not given, but some - may be excluded via another optional parameter - :param Iterable[str] sections_to_skip: analogous to - the check names parameter, but for specific sections to skip. - """ - - def __init__(self, path_conf_file, - sections_to_check=None, sections_to_skip=None): - - super(CommandChecker, self).__init__() - - self._logger = logging.getLogger( - "{}.{}".format(__name__, self.__class__.__name__)) - - # TODO: could provide parse strategy as parameter to supplement YAML. - # TODO: could also derive parsing behavior from extension. - self.path = path_conf_file - with open(self.path, 'r') as conf_file: - conf_data = yaml.safe_load(conf_file) - - # Determine which sections to validate. - sections = {sections_to_check} if isinstance(sections_to_check, str) \ - else set(sections_to_check or conf_data.keys()) - excl = {sections_to_skip} if isinstance(sections_to_skip, str) \ - else set(sections_to_skip or []) - sections -= excl - - self._logger.info("Validating %d sections: %s", - len(sections), - ", ".join(["'{}'".format(s) for s in sections])) - - # Store per-command mapping of status, nested under section. - self.section_to_status_by_command = defaultdict(dict) - # Store only information about the failures. - self.failures_by_section = defaultdict(list) # Access by section. - self.failures = set() # Access by command. - - for s in sections: - # Fetch section data or skip. - try: - section_data = conf_data[s] - except KeyError: - _LOGGER.info("No section '%s' in file '%s', skipping", - s, self.path) - continue - # Test each of the section's commands. - try: - # Is section's data a mapping? - commands_iter = section_data.items() - self._logger.debug("Processing section '%s' data " - "as mapping", s) - for name, command in commands_iter: - failed = self._store_status(section=s, command=command, - name=name) - self._logger.debug("Command '%s': %s", command, - "FAILURE" if failed else "SUCCESS") - except AttributeError: - self._logger.debug("Processing section '%s' data as list", s) - commands_iter = conf_data[s] - for cmd_item in commands_iter: - # Item is K-V pair? - try: - name, command = cmd_item - except ValueError: - # Treat item as command itself. - name, command = "", cmd_item - success = self._store_status(section=s, command=command, - name=name) - self._logger.debug("Command '%s': %s", command, - "SUCCESS" if success else "FAILURE") - - def _store_status(self, section, command, name): - """ - Based on new command execution attempt, update instance's - data structures with information about the success/fail status. - Return the result of the execution test. - """ - succeeded = is_command_callable(command, name) - # Store status regardless of its value in the instance's largest DS. - self.section_to_status_by_command[section][command] = succeeded - if not succeeded: - # Only update the failure-specific structures conditionally. - self.failures_by_section[section].append(command) - self.failures.add(command) - return succeeded - - @property - def failed(self): - """ - Determine whether *every* command succeeded for *every* config file - section that was validated during instance construction. 
- - :return bool: conjunction of execution success test result values, - obtained by testing each executable in every validated section - """ - # This will raise exception even if validation was attempted, - # but no sections were used. Effectively, delegate responsibility - # to the caller to initiate validation only if doing so is relevant. - if not self.section_to_status_by_command: - raise ValueError("No commands validated") - return 0 == len(self.failures) - - -def is_command_callable(command, name=""): - """ - Check if command can be called. - - :param str command: actual command to call - :param str name: nickname/alias by which to reference the command, optional - :return bool: whether given command's call succeeded - """ - - # Use `command` to see if command is callable, store exit code - code = os.system( - "command -v {0} >/dev/null 2>&1 || {{ exit 1; }}".format(command)) - - if code != 0: - alias_value = " ('{}') ".format(name) if name else " " - _LOGGER.debug("Command '{0}' is not callable: {1}". - format(alias_value, command)) - return not bool(code) - - def get_first_env_var(ev): """ Get the name and value of the first set environment variable diff --git a/docs/README.md b/docs/README.md index 28f52bd..872b4fc 100644 --- a/docs/README.md +++ b/docs/README.md @@ -10,7 +10,7 @@ In `divvy`, computing resources are organized as *compute packages*, which defin ## Installing -Releases are posted as [GitHub releases](https://github.com/databio/divvy/releases), or you can install from PyPI using `pip`: +Install from [GitHub releases](https://github.com/databio/divvy/releases) or from PyPI using `pip`: ```{console} diff --git a/docs/changelog.md b/docs/changelog.md index 6b9d6fa..4c32d16 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,6 +1,9 @@ # Changelog -## Unreleased +## [0.3.3] -- 2019-06-14 +### Changed +- Removed utilities that are in `peppy`. +- Removed logging setup function. ## [0.3.2] -- 2019-05-09 ### Changed diff --git a/mkdocs.yml b/mkdocs.yml index 15b661f..46f7364 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -17,7 +17,7 @@ nav: - Support: support.md - Contributing: contributing.md - Changelog: changelog.md - - DIVCFG examples: http://github.com/pepkit/pepenv + - DIVCFG examples: http://github.com/pepkit/divcfg theme: databio diff --git a/requirements/requirements-all.txt b/requirements/requirements-all.txt index c7e251a..6ccb37a 100644 --- a/requirements/requirements-all.txt +++ b/requirements/requirements-all.txt @@ -1,4 +1,4 @@ +attmap>=0.12.3 pandas>=0.20.2 pyyaml>=5.1 -attmap>=0.6 - +logmuse>=0.2.0 diff --git a/tests/divvy_tests/test_divvy.py b/tests/divvy_tests/test_divvy.py index b66ddca..2e2e859 100644 --- a/tests/divvy_tests/test_divvy.py +++ b/tests/divvy_tests/test_divvy.py @@ -82,7 +82,7 @@ def test_reset_active_settings(self, dcc): def test_reset_active_settings_works(self, dcc): """ Test if the settings are cleared """ dcc.reset_active_settings() - assert dcc.get_active_package() == {} + assert dcc.get_active_package() == PathExAttMap({}) class UpdatingPackagesTests:
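
Note (illustrative, not part of the patch): with `setup_divvy_logger` gone, importing `divvy` no longer configures a console handler; the package only attaches a `NullHandler` when the root logger has no handlers. An application that wants to see `divvy`'s log output is therefore assumed to configure logging itself. `logmuse` is added to the requirements above, but no call sites appear in this diff, so the sketch below sticks to the standard library and reuses the developer format string that this patch removes.

```python
import logging

import divvy  # assumes divvy >= 0.3.3 is installed

# Minimal sketch (assumption: the consuming application, not divvy, now owns
# logging configuration). basicConfig attaches a StreamHandler to the root
# logger; records from the "divvy" logger propagate up to it, since the
# package now only adds a NullHandler and no longer disables propagation.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(module)s:%(lineno)d (%(funcName)s) [%(levelname)s] > %(message)s",
)

# Optionally tune just the divvy logger.
logging.getLogger("divvy").setLevel(logging.INFO)
```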