diff --git a/hotsos/client.py b/hotsos/client.py
index 8f058ebe4..0e6771dad 100755
--- a/hotsos/client.py
+++ b/hotsos/client.py
@@ -213,10 +213,7 @@ def __init__(self, plugins=None):
         all will be run.
         """
         self._summary = OutputManager()
-        if plugins:
-            self.plugins = plugins
-        else:
-            self.plugins = plugintools.PLUGINS.keys()
+        self.plugins = plugins or plugintools.PLUGINS.keys()
 
     def setup_global_env(self):
         """ State saved here persists across all plugin runs. """
diff --git a/hotsos/core/plugins/openvswitch/common.py b/hotsos/core/plugins/openvswitch/common.py
index 4acdd2b31..2bae92283 100644
--- a/hotsos/core/plugins/openvswitch/common.py
+++ b/hotsos/core/plugins/openvswitch/common.py
@@ -60,6 +60,6 @@ class OpenvSwitchEventHandlerBase(OpenvSwitchChecksBase, EventHandlerBase):
     @property
     def summary(self):
         # mainline all results into summary root
-        ret = self.load_and_run()
+        ret = self.run()
         if ret:
             return sorted_dict(ret)
diff --git a/hotsos/core/plugintools.py b/hotsos/core/plugintools.py
index d7aa964fb..704508b87 100644
--- a/hotsos/core/plugintools.py
+++ b/hotsos/core/plugintools.py
@@ -7,6 +7,7 @@
 from hotsos.core.issues import IssuesManager
 from hotsos.core.log import log
 from hotsos.core.ycheck.scenarios import YScenarioChecker
+from hotsos.core.ycheck.events import EventsPreloader
 
 PLUGINS = {}
 PLUGIN_RUN_ORDER = []
@@ -392,12 +393,13 @@ def run(self):
         part_mgr = PartManager()
         failed_parts = []
         # The following are executed as part of each plugin run (but not last).
-        ALWAYS_RUN = {'auto_scenario_check': YScenarioChecker}
+        ALWAYS_RUN = {'auto_scenario_check': YScenarioChecker,
+                      'events_preload': EventsPreloader}
         for name, always_parts in ALWAYS_RUN.items():
             # update current env to reflect actual part being run
             HotSOSConfig.part_name = name
             try:
-                always_parts().load_and_run()
+                always_parts().run()
             except Exception as exc:
                 failed_parts.append(name)
                 log.exception("part '%s' raised exception: %s", name, exc)
diff --git a/hotsos/core/ycheck/engine/common.py b/hotsos/core/ycheck/engine/common.py
index d0847259e..6d2e3bdfd 100644
--- a/hotsos/core/ycheck/engine/common.py
+++ b/hotsos/core/ycheck/engine/common.py
@@ -86,16 +86,6 @@ def searcher(self):
         @return: FileSearcher object to be used by this handler.
         """
 
-    @abc.abstractmethod
-    def load(self):
-        """
-        Load definitions from yaml in preparation for them to be processed.
-        """
-
     @abc.abstractmethod
     def run(self):
         """ Process operations. """
-
-    @abc.abstractmethod
-    def load_and_run(self):
-        """ Load and run. """
diff --git a/hotsos/core/ycheck/engine/properties/search.py b/hotsos/core/ycheck/engine/properties/search.py
index 50f3c4669..81157f2ea 100644
--- a/hotsos/core/ycheck/engine/properties/search.py
+++ b/hotsos/core/ycheck/engine/properties/search.py
@@ -299,7 +299,7 @@ def is_sequence_search(self):
     @property
     def simple_search(self):
         if (self.is_sequence_search or not self.search_pattern or
-                bool(self.passthrough_results)):
+                self.passthrough_results):
             return
 
         sdef = self.cache.simple_search
@@ -323,7 +323,7 @@ def simple_search(self):
 
     @property
     def sequence_search(self):
-        if not self.is_sequence_search or bool(self.passthrough_results):
+        if not self.is_sequence_search or self.passthrough_results:
             return
 
         sdef = self.cache.sequence_search
@@ -334,7 +334,7 @@ def sequence_search(self):
         seq_body = self.body
         seq_end = self.end
 
-        if (seq_body or (seq_end and not bool(self.passthrough_results))):
+        if (seq_body or (seq_end and not self.passthrough_results)):
             sd_start = SearchDef(seq_start.search_pattern)
 
             sd_end = None
@@ -355,12 +355,12 @@ def sequence_search(self):
 
         log.warning("invalid sequence definition passthrough=%s "
                     "start=%s, body=%s, end=%s",
-                    bool(self.passthrough_results), seq_start, seq_body,
+                    self.passthrough_results, seq_start, seq_body,
                     seq_end)
 
     @property
     def sequence_passthrough_search(self):
-        if not self.is_sequence_search or not bool(self.passthrough_results):
+        if not self.is_sequence_search or not self.passthrough_results:
             return
 
         sdef = self.cache.sequence_passthrough_search
@@ -370,7 +370,7 @@ def sequence_passthrough_search(self):
         seq_start = self.start
         seq_end = self.end
 
-        if bool(self.passthrough_results) and all([seq_start, seq_end]):
+        if self.passthrough_results and all([seq_start, seq_end]):
             # start and end required for core.analytics.LogEventStats
             start_tag = "{}-start".format(self.unique_search_tag)
             end_tag = "{}-end".format(self.unique_search_tag)
@@ -460,3 +460,11 @@ class YPropertySearch(YPropertySearchBase, YPropertyMappedOverrideBase):
     _override_keys = ['search']
     _override_members = [YPropertySearchOpt, YPropertySearchConstraints,
                          YPropertySequencePart]
+
+    @property
+    def passthrough_results(self):
+        """ Override the member to ensure we always return a bool. """
+        if not isinstance(self.content, dict):
+            return False
+
+        return bool(self.content.get('passthrough-results', False))
""" -class EventHandlerBase(YHandlerBase, EventProcessingUtils): +class EventsSearchRegistry(UserDict): + """ + Maintains a set of event names - dot paths to events in yaml tree - that + have been registered as having a search property, a global FileSearcher + object and the results from running searches. This information is used + to load searches from a set of events, run them and save their results for + later retrieval. Search results are tagged with the names stored here. + + It might be the case that an event handler wants to use its own + FileSearcher in which case this supports setting a _custom_searcher that + is cleared when the global searcher is accessed. + """ + + def __init__(self): + self._global_searcher = None + self._custom_searcher = None + self._global_searcher_results = None + super().__init__() + + def __getitem__(self, key): + try: + return super().__getitem__(key) + except KeyError: + raise EventsSearchRegistryKeyNotFound( + key, + list(self.data)) from KeyError + + def get_global_searcher_results(self): + if self._global_searcher is None: + raise Exception("registry global searcher is None") + + if self._global_searcher_results is not None: + log.debug("using cached global event search results") + return self._global_searcher_results + + log.debug("fetching global event search results") + self._global_searcher_results = self._global_searcher.run() + return self._global_searcher_results + + def get_global_searcher(self, allow_create=False): + if self._global_searcher: + log.debug("using existing global searcher (%s)", + self._global_searcher) + return self._global_searcher + + if not allow_create: + raise Exception("global events searcher is not set but is " + "expected to be.") + + self._custom_searcher = None + constraint = SearchConstraintSearchSince( + ts_matcher_cls=CommonTimestampMatcher) + searcher = FileSearcher(constraint=constraint) + self._global_searcher = searcher + self._global_searcher_results = None + log.debug("creating new global searcher (%s)", searcher) + return searcher + + def set_custom_searcher(self, searcher): + self._custom_searcher = searcher + + @property + def current_searcher(self): + return self._custom_searcher or self.get_global_searcher() + + def _reset_searchers(self): + self._global_searcher = None + self._custom_searcher = None + self._global_searcher_results = None + + def reset(self, create_new_global_searcher=False): + log.info("resetting events global registry") + self._reset_searchers() + self.data = {} + if create_new_global_searcher: + self.get_global_searcher(allow_create=True) + + +class EventsBase(object): + # IMPORTANT: this state is maintained at class level so that all + # implementations can share it. It is therefore crucial that state is reset + # before loading a new set of event searches. + search_registry = EventsSearchRegistry() + + @staticmethod + def meets_requirements(event): + """ + If an event or group has a requirements property it must return True + for the events to be executed. + """ + if HotSOSConfig.force_mode: + return True + + if event.requires and not event.requires.result: + log.debug("event '%s' pre-requisites not met - " + "skipping", event.name) + return False + + return True + + @staticmethod + def skip_filtered_event(event_path): + """ + Apply event filter if provided. 
+ """ + e_filter = HotSOSConfig.event_filter + if e_filter and event_path != e_filter: + log.info("skipping event %s (filter=%s)", event_path, e_filter) + return True + + return False + + @staticmethod + def get_defs(group=None): + """ + Load the event definitions for the current plugin. By default all are + loaded and if a group path is provided, only events that are part of + that group are included. + + @param group: a group path can be provided to include events part of a + group. + """ + log.debug("loading event defs (group=%s)", group) + plugin_defs = YDefsLoader('events').plugin_defs + if not plugin_defs: + return {} + + if not group: + return plugin_defs + + # Exclude events that are not part of the group. + groups = group.split('.') + for i, subgroup in enumerate(groups): + if i == 0: + plugin_defs = {subgroup: plugin_defs[subgroup]} + else: + prev = groups[i - 1] + plugin_defs[prev] = {subgroup: plugin_defs[prev][subgroup]} + + return plugin_defs + + @staticmethod + def _get_event_from_path(events, path): + """ + Walk down path until we hit the event containing the + search property. We skip root/plugin name at start and + ".search" at the end. + + @param event: YDefsSection object representing the entire tree of + events. + @param path: event search property resolve path. + """ + event = None + for branch in path.split('.')[1:-1]: + if event is None: + event = getattr(events, branch) + else: + event = getattr(event, branch) + + return event + + @classmethod + def _load_event_search(cls, event, searcher): + """ Load search information from event into searcher. + + @param event: YDefsSection event object + @param searcher: FileSearcher object + """ + allow_constraints = True + if event.input.command: + # don't apply constraints to command outputs + allow_constraints = False + + for path in event.input.paths: + log.debug("loading search for event %s (path=%s, tag=%s)", + event.resolve_path, + path, event.search.unique_search_tag) + # Add to registry in case it is needed by handlers e.g. for + # sequence lookups. + cls.search_registry[event.resolve_path] = {'search': + event.search} + event.search.load_searcher( + searcher, path, + allow_constraints=allow_constraints) + + @classmethod + def load_searches(cls, group=None, searcher=None): + """ + Find all events that have a search property and load their search into + the global searcher. A custom searcher will be used instead if + provided. + + @param group: a group path can be provided to filter a subset of + events. + @param searcher: customer FileSearcher object to be used instead of the + global searcher. 
+ """ + if searcher is None: + searcher = cls.search_registry.get_global_searcher() + if len(searcher.catalog) > 0: + raise Exception("global event searcher catalog is not empty " + "and must be reset before loading so as not " + "to include searches from a previous run.") + + log.debug("started loading event (group=%s) searches into searcher " + "(%s)", group, searcher) + + search_props = set() + events = YDefsSection(HotSOSConfig.plugin_name, + cls.get_defs(group) or {}) + for prop in events.manager.properties.values(): + for item in prop: + if not issubclass(item['cls'], search.YPropertySearch): + break + + search_props.add(item['path']) + + if len(search_props) == 0: + log.debug("finished loading event searches but no search " + "properties found") + return + + log.debug("loading searches for %s events", len(search_props)) + for event_search_prop_path in search_props: + event = cls._get_event_from_path(events, event_search_prop_path) + if cls.skip_filtered_event(event.resolve_path): + log.debug("skipping event %s", event.resolve_path) + continue + + cls._load_event_search(event, searcher) + + log.debug("finished loading event searches into searcher " + "(registry has %s items)", len(cls.search_registry)) + + +class EventsPreloader(EventsBase): + """ + Pre-load all searches used in event definitions into a global FileSearcher + object and execute the search before running any event callbacks. + """ + + @classmethod + def execute(cls): + # Pre-load all event searches into a global event searcher + cls.load_searches() + # Run the searches so that results are ready when event handlers are + # run. + cls.search_registry.get_global_searcher_results() + + @classmethod + def reset(cls): + # Make sure we start with a clean registry + cls.search_registry.reset(create_new_global_searcher=True) + + @classmethod + def run(cls): + cls.reset() + cls.execute() + + +class EventHandlerBase(EventsBase, YHandlerBase, EventProcessingUtils): """ Root name used to identify a group of event definitions. Once all the yaml definitions are loaded this defines the level below which events @@ -235,60 +509,63 @@ class EventHandlerBase(YHandlerBase, EventProcessingUtils): def __init__(self, *args, searcher=None, **kwargs): """ - @param searcher: optional FileSearcher object. When running many event - checkers it is more efficient to share a - FileSearcher across them so that all searches are - done at once. + @param searcher: optional FileSearcher object. If not provided then the + global searcher will be used which is the recommended + approach so that all searches are aggregated into one + operation and therefore files only need to be searched + once. """ super().__init__(*args, **kwargs) - if not searcher: - log.debug("creating searcher for event checker") - searcher = FileSearcher(constraint=SearchConstraintSearchSince( - ts_matcher_cls=CommonTimestampMatcher)) + if searcher is None: + log.debug("no searcher provided - using global searcher") + searcher = self.search_registry.get_global_searcher() + # If no searcher is provided it is assumed that the global searcher + # already exists, is loaded with searches and they have been + # executed. Unit tests however, should be resetting the registry + # prior to each run and we will therefore need to load searches + # each time which is why we do this here. This is therefore not + # intended to be used outside of a test scenario. 
+ if len(searcher.catalog) == 0: + log.info("global searcher catalog is empty so launching " + "pre-load of event searches for group '%s'", + self.event_group) + # NOTE: this is not re-entrant safe and is only ever expected + # to be done from a unit test. + self.load_searches(group=self.event_group) + else: + # If a searcher is provided we switch over but do not clear global + # searcher. + if self.search_registry._custom_searcher != searcher: + self.search_registry.set_custom_searcher(searcher) + + self.load_searches(group=self.event_group, searcher=searcher) - self._searcher = searcher self._event_results = None @property def searcher(self): - return self._searcher + """ + Return the current searcher we are using. If custom searcher is no + longer needed it is expected that it will have been cleared in the + __init__ method. + """ + return self.search_registry.current_searcher @cached_property - def event_definitions(self): + def events(self): """ Load event definitions from yaml. """ - _event_defs = {} - - plugin = YDefsLoader('events').plugin_defs - if not plugin: - return _event_defs - - log.debug("loading defs for subgroup=%s", self.event_group) - ytree = plugin - ypath = self.event_group.split('.') - for i, g in enumerate(ypath): - if i >= len(ypath) - 1: - group_defs = ytree.get(g) - else: - ytree = ytree.get(g) - - group = YDefsSection(self.event_group, group_defs) + group = YDefsSection(HotSOSConfig.plugin_name, + self.get_defs(self.event_group) or {}) log.debug("sections=%s, events=%s", len(list(group.branch_sections)), len(list(group.leaf_sections))) + _events = {} for event in group.leaf_sections: - fullname = "{}.{}.{}".format(HotSOSConfig.plugin_name, - event.parent.name, event.name) - if (HotSOSConfig.event_filter and - fullname != HotSOSConfig.event_filter): - log.info("skipping event %s (filter=%s)", fullname, - HotSOSConfig.event_filter) + if self.skip_filtered_event(event.resolve_path): continue - if (not HotSOSConfig.force_mode and event.requires and not - event.requires.result): - log.error("event '%s' pre-requisites not met - " - "skipping", event.name) + if not self.meets_requirements(event): return {} log.debug("event: %s", event.name) @@ -296,43 +573,44 @@ def event_definitions(self): event.input.command is not None) section_name = event.parent.name - if section_name not in _event_defs: - _event_defs[section_name] = {} - - for path in event.input.paths: - if event.input.command: - # don't apply constraints to command outputs - allow_constraints = False - else: - allow_constraints = True - - event.search.load_searcher(self.searcher, path, - allow_constraints=allow_constraints) + if section_name not in _events: + _events[section_name] = {} - passthrough = bool(event.search.passthrough_results) - emeta = {'passthrough': passthrough, - 'sequence': event.search.sequence_search, - 'tag': event.search.unique_search_tag} - _event_defs[section_name][event.name] = emeta + _events[section_name][event.name] = event.resolve_path - return _event_defs - - def load(self): - """ Pre-load event definitions. """ - self.event_definitions + return _events @property def final_event_results(self): """ Cache of results in case run() is called again. """ return self._event_results - def run(self, results): + def _get_event_search_results(self, event_search, global_results): + if event_search.passthrough_results: + # this is for implementations that have their own means of + # retrieving results. 
+ return global_results + + seq_def = event_search.sequence_search + if seq_def: + search_results = global_results.find_sequence_sections(seq_def) + if search_results: + return search_results.values() + else: + return global_results.find_by_tag(event_search.unique_search_tag) + + def run(self, results=None): """ Process each event and call respective callback functions when results where found. - @param results: SearchResultsCollection object. + @param results: If no results are provides we get them from the global + searcher. This is provided for the case where a custom + searcher is in use. """ + if results is None: + results = self.search_registry.get_global_searcher_results() + if self.final_event_results is not None: return self.final_event_results @@ -340,28 +618,17 @@ def run(self, results): raise Exception("need to register at least one callback for " "event handler.") - log.debug("registered callbacks:\n%s", '\n'.join(CALLBACKS.keys())) + log.debug("registered event callbacks:\n%s", '\n'. + join(CALLBACKS.keys())) info = {} - for section_name, section in self.event_definitions.items(): - for event, event_meta in section.items(): - search_tag = event_meta['tag'] - seq_def = None - if event_meta['passthrough']: - # this is for implementations that have their own means of - # retrieving results. - search_results = results - else: - seq_def = event_meta['sequence'] - if seq_def: - search_results = results.find_sequence_sections( - seq_def) - if search_results: - search_results = search_results.values() - else: - search_results = results.find_by_tag(search_tag) - + for section_name, section in self.events.items(): + for event, fullname in section.items(): + event_search = self.search_registry[fullname]['search'] + search_results = self._get_event_search_results(event_search, + results) if not search_results: - log.debug("event %s did not yield any results", event) + log.debug("event %s (tag=%s) did not yield any results", + event, event_search.unique_search_tag) continue # We want this to throw an exception if the callback is not @@ -373,18 +640,22 @@ def run(self, results): raise EventCallbackNotFound(msg) callback = CALLBACKS[callback_name] - event_results_obj = EventCheckResult(section_name, event, - search_results, - search_tag, - self.searcher, - sequence_def=seq_def) + seq_def = event_search.sequence_search + event_result = EventCheckResult(section_name, event, + search_results, + event_search.unique_search_tag, + self.searcher, + sequence_def=seq_def) log.debug("executing event %s.%s callback '%s'", - event_results_obj.section, event, - callback_name) - ret = callback()(event_results_obj) + event_result.section, event, callback_name) + ret = callback()(event_result) if not ret: continue + log.debug("event %s.%s callback '%s' return size is %d bytes", + event_result.section, event, callback_name, + sys.getsizeof(ret)) + # if the return is a tuple it is assumed to be of the form # (, ) where is used to # override the output key for the result which defaults to the @@ -406,7 +677,3 @@ def run(self, results): if info: self._event_results = info return info - - def load_and_run(self): - self.load() - return self.run(self.searcher.run()) diff --git a/hotsos/core/ycheck/scenarios.py b/hotsos/core/ycheck/scenarios.py index 856a749ab..3dc8c5499 100644 --- a/hotsos/core/ycheck/scenarios.py +++ b/hotsos/core/ycheck/scenarios.py @@ -137,7 +137,10 @@ def _run_scenario_conclusion(self, scenario, issue_mgr): else: log.debug("no conclusions reached") - def run(self): + def run(self, load=True): 
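
The flow introduced above splits loading from running: EventsPreloader populates the shared
registry and executes the global search once per plugin, and event handlers then consume the
cached results. A minimal usage sketch based on the unit tests further down in this patch
(MyGroupEventChecks is a hypothetical handler; it assumes an events yaml group 'mygroup'
exists for the current plugin and that callbacks are registered for its events):

    from hotsos.core.ycheck.events import EventHandlerBase, EventsPreloader


    class MyGroupEventChecks(EventHandlerBase):

        @property
        def event_group(self):
            return 'mygroup'


    # Once per plugin run: reset the registry, load every event search into
    # the global FileSearcher and execute the search.
    EventsPreloader().run()

    # Handlers then consume the cached global results; no per-handler search.
    results = MyGroupEventChecks().run()
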
diff --git a/hotsos/core/ycheck/scenarios.py b/hotsos/core/ycheck/scenarios.py
index 856a749ab..3dc8c5499 100644
--- a/hotsos/core/ycheck/scenarios.py
+++ b/hotsos/core/ycheck/scenarios.py
@@ -137,7 +137,10 @@ def _run_scenario_conclusion(self, scenario, issue_mgr):
         else:
             log.debug("no conclusions reached")
 
-    def run(self):
+    def run(self, load=True):
+        if load:
+            self.load()
+
         failed_scenarios = []
         issue_mgr = IssuesManager()
         for scenario in self.scenarios:
@@ -155,7 +158,3 @@ def run(self):
                    "debug mode (--debug) to get more detail".
                    format(', '.join(failed_scenarios)))
             issue_mgr.add(HotSOSScenariosWarning(msg))
-
-    def load_and_run(self):
-        self.load()
-        return self.run()
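
With loading now folded into run() by default, callers keep the one-liner while tests that
inspect loaded scenarios first can skip the implicit load, as the test changes below show.
A short illustrative sketch (not part of the patch):

    from hotsos.core.ycheck.scenarios import YScenarioChecker

    # Typical usage: load() is performed implicitly.
    YScenarioChecker().run()

    # Two-step usage, e.g. when inspecting loaded checks before running.
    checker = YScenarioChecker()
    checker.load()
    # ... inspect checker state here ...
    checker.run(load=False)
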
diff --git a/hotsos/defs/events/openstack/apparmor.yaml b/hotsos/defs/events/openstack/apparmor.yaml
index 6c0b54377..80ca3d2f8 100644
--- a/hotsos/defs/events/openstack/apparmor.yaml
+++ b/hotsos/defs/events/openstack/apparmor.yaml
@@ -10,8 +10,8 @@ denials:
   # day (\d{1,2}), time ([\d:]+) and key (\S+neutron\S+) separated for
   # grouping. See AgentApparmorChecks class for more details.
   nova:
-    expr: '(\w{3,5})\s+(\d{1,2})\s+([\d:]+)\s+.+apparmor="DENIED".+\s+profile="(\S+nova\S+)"\s+.+'
+    expr: '(\w{3,5})\s+(\d{1,2})\s+([\d:]+)\s+.+apparmor="DENIED".+\s+profile="(\S+nova\S+)"'
     hint: apparmor
   neutron:
-    expr: '(\w{3,5})\s+(\d{1,2})\s+([\d:]+)\s+.+apparmor="DENIED".+\s+profile="(\S+neutron\S+)"\s+.+'
+    expr: '(\w{3,5})\s+(\d{1,2})\s+([\d:]+)\s+.+apparmor="DENIED".+\s+profile="(\S+neutron\S+)"'
     hint: apparmor
diff --git a/hotsos/defs/events/openstack/neutron/ml2-routers.yaml b/hotsos/defs/events/openstack/neutron/ml2-routers.yaml
index f21a87c1d..499e51403 100644
--- a/hotsos/defs/events/openstack/neutron/ml2-routers.yaml
+++ b/hotsos/defs/events/openstack/neutron/ml2-routers.yaml
@@ -4,6 +4,6 @@ l3ha:
   input:
     command: journalctl
     options:
-      args-callback: hotsos.plugin_extensions.openstack.agent.events.NeutronL3HAEventChecks.journalctl_args
+      args-callback: hotsos.plugin_extensions.openstack.agent.events.NeutronL3HAEventCheckJournalCtl.args
   # timestamp at start of line will be in journalctl -oshort-iso format
   expr: '([\d-]+)T([\d:]+)\S+ \S+ Keepalived_vrrp\[\d+\]: (?:VRRP_Instance)?\(VR_(\d+)\) .+ (\S+) STATE'
diff --git a/hotsos/defs/events/openstack/nova/external-events.yaml b/hotsos/defs/events/openstack/nova/external-events.yaml
index b51dac728..6510424b5 100644
--- a/hotsos/defs/events/openstack/nova/external-events.yaml
+++ b/hotsos/defs/events/openstack/nova/external-events.yaml
@@ -8,6 +8,6 @@ input:
 # Supported events - https://docs.openstack.org/api-ref/compute/?expanded=run-events-detail#create-external-events-os-server-external-events
 events:
   network-changed:
-    expr: '[\d-]+ [\d:]+\.\d{3} .+\[instance: (\S+)\].+Received event (network-changed)-(\S+)\s+'
+    expr: '[\d-]+ [\d:]+\.\d{3} .+\[instance: (\S+)\].+Received event network-changed-(\S+)'
   network-vif-plugged:
-    expr: '[\d-]+ [\d:]+\.\d{3} .+\[instance: (\S+)\].+Preparing to wait for external event (network-vif-plugged)-(\S+)\s+'
+    expr: '[\d-]+ [\d:]+\.\d{3} .+\[instance: (\S+)\].+Preparing to wait for external event network-vif-plugged-(\S+)'
diff --git a/hotsos/defs/events/openstack/octavia.yaml b/hotsos/defs/events/openstack/octavia.yaml
index 6c272c374..024891ddc 100644
--- a/hotsos/defs/events/openstack/octavia.yaml
+++ b/hotsos/defs/events/openstack/octavia.yaml
@@ -3,7 +3,7 @@ octavia-health-manager:
   input:
     path: 'var/log/octavia/octavia-health-manager.log'
   amp-missed-heartbeats:
-    expr: '([\d-]+) ([\d:]+)\.\d{3} .+ Amphora (\S+) health message was processed too slowly:.+'
+    expr: '([\d-]+) ([\d:]+)\.\d{3} .+ Amphora (\S+) health message was processed too slowly:'
     hint: 'Amphora'
   lb-failover-auto:
     expr: '([\d-]+) ([\d:]+)\.\d{3} .+ Performing failover for amphora:\s+(.+)'
diff --git a/hotsos/defs/events/openvswitch/ovn/errors-and-warnings.yaml b/hotsos/defs/events/openvswitch/ovn/errors-and-warnings.yaml
index c7b16b07e..309b5c6db 100644
--- a/hotsos/defs/events/openvswitch/ovn/errors-and-warnings.yaml
+++ b/hotsos/defs/events/openvswitch/ovn/errors-and-warnings.yaml
@@ -3,19 +3,19 @@ ovsdb-server-sb:
   input:
     path: 'var/log/ovn/ovsdb-server-sb.log'
   hint: '(ERR|WARN|EMER)'
-  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)\|.+'
+  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)'
 ovsdb-server-nb:
   input:
     path: 'var/log/ovn/ovsdb-server-nb.log'
   hint: '(ERR|WARN|EMER)'
-  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)\|.+'
+  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)'
 ovn-northd:
   input:
     path: 'var/log/ovn/ovn-northd.log'
   hint: '(ERR|WARN|EMER)'
-  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)\|.+'
+  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)'
 ovn-controller:
   input:
     path: 'var/log/ovn/ovn-controller.log'
   hint: '(ERR|WARN|EMER)'
-  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)\|.+'
+  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)'
diff --git a/hotsos/defs/events/openvswitch/ovs/errors-and-warnings.yaml b/hotsos/defs/events/openvswitch/ovs/errors-and-warnings.yaml
index 8497d54db..b400b6fbb 100644
--- a/hotsos/defs/events/openvswitch/ovs/errors-and-warnings.yaml
+++ b/hotsos/defs/events/openvswitch/ovs/errors-and-warnings.yaml
@@ -2,10 +2,8 @@ ovs-vswitchd:
   input:
     path: 'var/log/openvswitch/ovs-vswitchd.log'
-  hint: '(ERR|WARN|EMER)'
-  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)\|.+'
+  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)'
 
 ovsdb-server:
   input:
     path: 'var/log/openvswitch/ovsdb-server.log'
-  hint: '(ERR|WARN|EMER)'
-  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)\|.+'
+  expr: '([\d-]+)T[\d:]+\.\d+Z.+\|(ERR|ERROR|WARN|EMER)'
diff --git a/hotsos/defs/events/openvswitch/ovs/ovs-vswitchd.yaml b/hotsos/defs/events/openvswitch/ovs/ovs-vswitchd.yaml
index c7b2647b4..d3b19e0f0 100644
--- a/hotsos/defs/events/openvswitch/ovs/ovs-vswitchd.yaml
+++ b/hotsos/defs/events/openvswitch/ovs/ovs-vswitchd.yaml
@@ -18,7 +18,5 @@ inactivity-probe:
 involuntary-context-switches:
   # we capture date and hour as subgroups
   expr: '([\d-]+)T(\d+):[\d:]+\.\d+Z.+\|timeval\|WARN\|context switches: 0 voluntary, (\d+) involuntary'
-  hint: timeval
 assertion-failures:
   expr: '([\d-]+)T[\d:]+\.\d+Z.+\|util.+\|EMER\|\S+: assertion '
-  hint: assertion
diff --git a/hotsos/plugin_extensions/openstack/agent/events.py b/hotsos/plugin_extensions/openstack/agent/events.py
index 33c9e4736..53eccb51a 100644
--- a/hotsos/plugin_extensions/openstack/agent/events.py
+++ b/hotsos/plugin_extensions/openstack/agent/events.py
@@ -347,10 +347,9 @@ def __call__(self, event):
         return {'transitions': transitions}, 'keepalived'
 
 
-class NeutronL3HAEventChecks(OpenstackEventHandlerBase):
-    event_group = 'neutron.ml2-routers'
+class NeutronL3HAEventCheckJournalCtl(object):
 
-    def journalctl_args(self):
+    def args(self):
         """ Args callback for event cli command """
         args = []
         kwargs = {'unit': 'neutron-l3-agent'}
@@ -359,6 +358,10 @@ def journalctl_args(self):
 
         return args, kwargs
 
+
+class NeutronL3HAEventChecks(OpenstackEventHandlerBase):
+    event_group = 'neutron.ml2-routers'
+
     def __109_summary_neutron_l3ha(self):
         return self.final_event_results
 
@@ -374,9 +377,6 @@ def _run_checks(self, checks):
         searcher = FileSearcher(constraint=SearchConstraintSearchSince(
             ts_matcher_cls=CommonTimestampMatcher))
         check_objs = [c(searcher=searcher) for c in checks]
-        for check in check_objs:
-            check.load()
-
         results = searcher.run()
         _final_results = {}
         for check in check_objs:
diff --git a/hotsos/plugin_extensions/openstack/nova_external_events.py b/hotsos/plugin_extensions/openstack/nova_external_events.py
index 74d1b052d..a21685c7b 100644
--- a/hotsos/plugin_extensions/openstack/nova_external_events.py
+++ b/hotsos/plugin_extensions/openstack/nova_external_events.py
@@ -36,14 +36,14 @@ def __call__(self, event):
         s = FileSearcher(constraint=c)
         for result in event.results:
             instance_id = result.get(1)
-            event_id = result.get(3)
+            event_id = result.get(2)
             result_path = event.searcher.resolve_source_id(result.source_id)
             events[event_id] = {'instance_id': instance_id,
                                 'data_source': result_path}
 
             for stage in EXT_EVENT_META[event.name]['stages_keys']:
-                expr = (r".+\[instance: {}\]\s+{}\s.*\s?event\s+{}-{}.? "
-                        ".+".
+                expr = (r"[\d-]+ [\d:]+\.\d{{3}} .+\[instance: {}\]"
+                        r"\s+{}\s.*\s?event\s+{}-{}.?".
                         format(instance_id, stage, event.name, event_id))
                 tag = "{}_{}_{}".format(instance_id, event_id, stage)
                 sd = SearchDef(expr, tag, hint=event.name,
@@ -84,4 +84,4 @@ class NovaExternalEventChecks(OpenstackEventHandlerBase):
     summary_part_index = 1
 
     def __8_summary_os_server_external_events(self):
-        return self.load_and_run()
+        return self.run()
diff --git a/hotsos/plugin_extensions/openstack/vm_info.py b/hotsos/plugin_extensions/openstack/vm_info.py
index a6bf37ed5..6f777a2a1 100644
--- a/hotsos/plugin_extensions/openstack/vm_info.py
+++ b/hotsos/plugin_extensions/openstack/vm_info.py
@@ -179,4 +179,4 @@ class NovaServerMigrationAnalysis(OpenstackEventHandlerBase):
     summary_part_index = 3
 
     def __6_summary_nova_migrations(self):
-        return self.load_and_run()
+        return self.run()
diff --git a/hotsos/plugin_extensions/storage/ceph_event_checks.py b/hotsos/plugin_extensions/storage/ceph_event_checks.py
index 2365dc9da..ef964495c 100644
--- a/hotsos/plugin_extensions/storage/ceph_event_checks.py
+++ b/hotsos/plugin_extensions/storage/ceph_event_checks.py
@@ -121,4 +121,4 @@ class CephEventHandler(CephChecksBase, EventHandlerBase):
     @property
     def summary(self):
         # mainline all results into summary root
-        return self.load_and_run()
+        return self.run()
diff --git a/tests/unit/storage/test_ceph_osd.py b/tests/unit/storage/test_ceph_osd.py
index dd66842ad..7bf4dc804 100644
--- a/tests/unit/storage/test_ceph_osd.py
+++ b/tests/unit/storage/test_ceph_osd.py
@@ -5,6 +5,7 @@
 from hotsos.core.plugins.storage import (
     ceph as ceph_core,
 )
+from hotsos.core.ycheck.events import EventsPreloader
 from hotsos.plugin_extensions.storage import (
     ceph_summary,
     ceph_event_checks,
@@ -212,6 +213,11 @@ def test_ceph_daemon_log_checker(self, mock_cli):
         mock_cli.return_value.date.return_value = "2021-01-01 00:00:00"
         result = {'crc-err-bluestore': {'2021-02-12': 5, '2021-02-13': 1},
                   'crc-err-rocksdb': {'2021-02-12': 7}}
+
+        # This is done in setUp but we have to repeat here otherwise the
+        # date() mock will not be used in the search constraints.
+        EventsPreloader.reset()
+
         inst = ceph_event_checks.CephEventHandler()
         actual = self.part_output_to_actual(inst.output)
         self.assertEqual(actual, result)
diff --git a/tests/unit/test_openstack.py b/tests/unit/test_openstack.py
index f237a1087..a7b9947c0 100644
--- a/tests/unit/test_openstack.py
+++ b/tests/unit/test_openstack.py
@@ -896,7 +896,7 @@ def test_process_rpc_loop_results(self):
         section_key = "neutron-ovs-agent"
 
         inst = agent.events.NeutronAgentEventChecks()
-        inst.load_and_run()
+        inst.run()
         actual = self.part_output_to_actual(inst.output)
         self.assertEqual(actual[section_key], expected)
 
@@ -956,7 +956,7 @@ def test_get_router_event_stats(self):
         section_key = "neutron-l3-agent"
 
         inst = agent.events.NeutronAgentEventChecks()
-        inst.load_and_run()
+        inst.run()
         actual = self.part_output_to_actual(inst.output)
         self.assertEqual(actual[section_key], expected)
 
@@ -978,10 +978,10 @@ def test_run_octavia_checks(self):
                                            '2022-03-09': 1}}
                     }
                 }
+        inst = agent.events.OctaviaAgentEventChecks()
+        inst.run()
+        actual = self.part_output_to_actual(inst.output)
         for section_key in ['amp-missed-heartbeats', 'lb-failovers']:
-            inst = agent.events.OctaviaAgentEventChecks()
-            inst.load_and_run()
-            actual = self.part_output_to_actual(inst.output)
             self.assertEqual(actual["octavia"][section_key],
                              expected[section_key])
 
@@ -990,10 +990,10 @@ def test_run_apache_checks(self):
         expected = {'connection-refused': {
                         '2021-10-26': {'127.0.0.1:8981': 3}}}
+        inst = agent.events.ApacheEventChecks()
+        inst.run()
+        actual = self.part_output_to_actual(inst.output)
         for section_key in ['connection-refused']:
-            inst = agent.events.ApacheEventChecks()
-            inst.load_and_run()
-            actual = self.part_output_to_actual(inst.output)
             self.assertEqual(actual['apache'][section_key],
                              expected[section_key])
 
@@ -1008,7 +1008,7 @@ def test_run_apparmor_checks(self):
                     '/usr/bin/neutron-openvswitch-agent': {
                         'Mar 3': 4}}}}
         inst = agent.events.AgentApparmorChecks()
-        inst.load_and_run()
+        inst.run()
         actual = self.part_output_to_actual(inst.output)
         self.assertEqual(actual['apparmor'], expected)
 
@@ -1030,7 +1030,7 @@ def test_run_apparmor_checks_w_time_granularity(self):
                             '22:57:22': 1,
                             '22:57:24': 2}}}}}
         inst = agent.events.AgentApparmorChecks()
-        inst.load_and_run()
+        inst.run()
         actual = self.part_output_to_actual(inst.output)
         self.assertEqual(actual['apparmor'], expected)
 
@@ -1041,7 +1041,7 @@ def test_run_nova_checks(self):
                                 '2022-09-17': {'0000:3b:0f.7': 1,
                                                '0000:3b:10.0': 1}}}
         inst = agent.events.NovaComputeEventChecks()
-        inst.load_and_run()
+        inst.run()
         actual = self.part_output_to_actual(inst.output)
         self.assertEqual(actual["nova"], expected)
 
@@ -1051,7 +1051,7 @@ def test_run_neutron_l3ha_checks(self):
                         '984c22fd-64b3-4fa1-8ddd-87090f401ce5': {
                             '2022-02-10': 1}}}}
         inst = agent.events.NeutronL3HAEventChecks()
-        inst.load_and_run()
+        inst.run()
         actual = self.part_output_to_actual(inst.output)
         self.assertEqual(actual["neutron-l3ha"], expected)
 
@@ -1064,7 +1064,7 @@ def test_run_neutron_l3ha_checks_w_issue(self):
                         '984c22fd-64b3-4fa1-8ddd-87090f401ce5': {
                             '2022-02-10': 1}}}}
         inst = agent.events.NeutronL3HAEventChecks()
-        inst.load_and_run()
+        inst.run()
         actual = self.part_output_to_actual(inst.output)
         self.assertEqual(actual["neutron-l3ha"], expected)
         issues = list(IssuesStore().load().values())[0]
@@ -1076,7 +1076,7 @@ def test_run_neutron_l3ha_checks_w_issue(self):
                                              NEUTRON_HTTP})
     def test_api_events(self):
         inst = agent.events.APIEvents()
-        inst.load_and_run()
+        inst.run()
         expected = {'http-requests': {'neutron': {
                                           '2022-05-11': {'GET': 2,
                                                          'PUT': 3,
@@ -1089,7 +1089,7 @@ def test_api_events(self):
                                              OVSDBAPP_LEADER_CHANGING})
     def test_server_ovsdbapp_events(self):
         inst = agent.events.NeutronAgentEventChecks()
-        inst.load_and_run()
+        inst.run()
         expected = {'neutron-server': {
             'ovsdbapp-nb-leader-reconnect': {
                 '2023-11-23': {'6641': 2}},
@@ -1102,7 +1102,7 @@ def test_server_ovsdbapp_events(self):
                                              OVN_RESOURCE_VERSION_BUMP_EVENTS})
     def test_server_ovn_resource_version_bump_events(self):
         inst = agent.events.NeutronAgentEventChecks()
-        inst.load_and_run()
+        inst.run()
         expected = {'neutron-server': {
             'ovn-resource-revision-bump': {'2023-12-09': {
                 '4dedf9dd-ff5e-4b71-bebb-9d168b83c0b8': 2,
@@ -1115,7 +1115,7 @@ def test_server_ovn_resource_version_bump_events(self):
                                              OVN_OVSDB_ABORTED_TRANSACTIONS})
     def test_server_ovsdb_aborted_transactions(self):
         inst = agent.events.NeutronAgentEventChecks()
-        inst.load_and_run()
+        inst.run()
         expected = {'neutron-server': {
             'ovsdb-transaction-aborted': {'2023-12-12': 5}}}
         actual = self.part_output_to_actual(inst.output)
diff --git a/tests/unit/test_openvswitch.py b/tests/unit/test_openvswitch.py
index 620c32d29..5cd5860cc 100644
--- a/tests/unit/test_openvswitch.py
+++ b/tests/unit/test_openvswitch.py
@@ -511,7 +511,7 @@ def __init__(self, svcs):
                     self.processes[svc] = 1
 
         mock_systemd.side_effect = FakeSystemdHelper
-        YScenarioChecker().load_and_run()
+        YScenarioChecker().run()
         msg = ('The ovn-northd service on this ovn-central host is not '
                'active/running which means that changes made to the '
                'northbound database are not being ported to the southbound '
@@ -544,7 +544,7 @@ def start_time_secs(self):  # pylint: disable=W0236
         with mock.patch(('hotsos.core.host_helpers.systemd.SystemdHelper.'
                          'services'), services):
-            YScenarioChecker().load_and_run()
+            YScenarioChecker().run()
 
         issues = list(IssuesStore().load().values())
         self.assertEqual(len(issues), 0)
@@ -573,7 +573,7 @@ def start_time_secs(self):  # pylint: disable=W0236
         with mock.patch(('hotsos.core.host_helpers.systemd.SystemdHelper.'
                          'services'), services):
-            YScenarioChecker().load_and_run()
+            YScenarioChecker().run()
 
         msg = ("One or more of services ovn-northd, ovn-ovsdb-server-nb "
                "and ovn-ovsdb-server-sb has not been restarted since ssl "
               "certs were updated and this may breaking their ability to "
@@ -601,7 +601,7 @@ def start_time_secs(self):  # pylint: disable=W0236
                    FakeSystemdService('ovn-controller', 'enabled')}
         with mock.patch(('hotsos.core.host_helpers.systemd.SystemdHelper.'
                          'services'), services):
-            YScenarioChecker().load_and_run()
+            YScenarioChecker().run()
 
         issues = list(IssuesStore().load().values())
         self.assertEqual(len(issues), 0)
@@ -625,7 +625,7 @@ def start_time_secs(self):  # pylint: disable=W0236
                    FakeSystemdService('ovn-controller', 'enabled')}
         with mock.patch(('hotsos.core.host_helpers.systemd.SystemdHelper.'
                          'services'), services):
-            YScenarioChecker().load_and_run()
+            YScenarioChecker().run()
         msg = ("ovn-controller has not been restarted since ssl certs "
                "were updated so may be using old certs. Please check.")
         issues = list(IssuesStore().load().values())[0]
diff --git a/tests/unit/test_ycheck_events.py b/tests/unit/test_ycheck_events.py
index 20a2037e8..5477a9c95 100644
--- a/tests/unit/test_ycheck_events.py
+++ b/tests/unit/test_ycheck_events.py
@@ -51,15 +51,14 @@
 """
 
 EVENT_DEF_SIMPLE = r"""
-myplugin:
-  myeventgroup:
-    input:
-      path: a/path
-    myeventsubgroup:
-      event1:
-        expr: 'event1'
-      event2:
-        expr: 'event2'
+myeventgroup:
+  input:
+    path: a/path
+  myeventsubgroup:
+    event1:
+      expr: 'event1'
+    event2:
+      expr: 'event2'
 """  # noqa
 
 EVENT_DEF_MULTI_SEARCH = r"""
@@ -183,7 +182,7 @@ class MyEventHandler(EventHandlerBase):
             def event_group(self):
                 return 'mygroup'
 
-        MyEventHandler().load_and_run()
+        MyEventHandler().run()
         self.assertEqual(match_count['count'], 3)
         self.assertEqual(list(callbacks_called.keys()),
                          ['my-sequence-search',
@@ -204,9 +203,10 @@ def event_group(self):
                 return 'mygroup'
 
         with self.assertRaises(EventCallbackNotFound):
-            MyEventHandler().load_and_run()
+            MyEventHandler().run()
 
-    @utils.create_data_root({'events/myplugin/mygroup.yaml': EVENT_DEF_SIMPLE})
+    @utils.create_data_root({'events/myplugin/mygroup.yaml': EVENT_DEF_SIMPLE,
+                             'a/path': 'content'})
     def test_events_filter_none(self):
         HotSOSConfig.plugin_yaml_defs = HotSOSConfig.data_root
         HotSOSConfig.plugin_name = 'myplugin'
@@ -217,22 +217,22 @@ class MyEventHandler(EventHandlerBase):
             def event_group(self):
                 return 'mygroup'
 
-        prefix = 'mygroup.myplugin.myeventgroup.myeventsubgroup'
         defs = {'myeventsubgroup': {
-            'event1': {
-                'passthrough': False,
-                'sequence': None,
-                'tag': prefix + '.event1.search'},
-            'event2': {
-                'passthrough': False,
-                'sequence': None,
-                'tag': prefix + '.event2.search'}}}
-        self.assertEqual(MyEventHandler().event_definitions, defs)
-
-    @utils.create_data_root({'events/myplugin/mygroup.yaml': EVENT_DEF_SIMPLE})
+            'event1': ('myplugin.mygroup.myeventgroup.myeventsubgroup.'
+                       'event1'),
+            'event2': ('myplugin.mygroup.myeventgroup.myeventsubgroup.'
+                       'event2')}}
+
+        handler = MyEventHandler()
+        self.assertEqual(handler.events, defs)
+        self.assertEqual(len(handler.searcher.catalog), 1)
+
+    @utils.create_data_root({'events/myplugin/mygroup.yaml': EVENT_DEF_SIMPLE,
+                             'a/path': 'content'})
     def test_events_filter_event2(self):
         HotSOSConfig.plugin_yaml_defs = HotSOSConfig.data_root
-        HotSOSConfig.event_filter = 'myplugin.myeventsubgroup.event2'
+        HotSOSConfig.event_filter = ('myplugin.mygroup.myeventgroup.'
+                                     'myeventsubgroup.event2')
         HotSOSConfig.plugin_name = 'myplugin'
 
         class MyEventHandler(EventHandlerBase):
@@ -241,15 +241,15 @@ class MyEventHandler(EventHandlerBase):
             def event_group(self):
                 return 'mygroup'
 
-        prefix = 'mygroup.myplugin.myeventgroup.myeventsubgroup'
         defs = {'myeventsubgroup': {
-            'event2': {
-                'passthrough': False,
-                'sequence': None,
-                'tag': prefix + '.event2.search'}}}
-        self.assertEqual(MyEventHandler().event_definitions, defs)
-
-    @utils.create_data_root({'events/myplugin/mygroup.yaml': EVENT_DEF_SIMPLE})
+            'event2': ('myplugin.mygroup.myeventgroup.myeventsubgroup.'
+                       'event2')}}
+        handler = MyEventHandler()
+        self.assertEqual(handler.events, defs)
+        self.assertEqual(len(handler.searcher.catalog), 1)
+
+    @utils.create_data_root({'events/myplugin/mygroup.yaml': EVENT_DEF_SIMPLE,
+                             'a/path': 'content'})
     def test_events_filter_nonexistent(self):
         HotSOSConfig.plugin_yaml_defs = HotSOSConfig.data_root
         HotSOSConfig.event_filter = 'blahblah'
@@ -262,4 +262,6 @@ def event_group(self):
                 return 'mygroup'
 
         defs = {}
-        self.assertEqual(MyEventHandler().event_definitions, defs)
+        handler = MyEventHandler()
+        self.assertEqual(handler.events, defs)
+        self.assertEqual(len(handler.searcher.catalog), 0)
diff --git a/tests/unit/test_ycheck_properties.py b/tests/unit/test_ycheck_properties.py
index 8e24d6789..32cfec938 100644
--- a/tests/unit/test_ycheck_properties.py
+++ b/tests/unit/test_ycheck_properties.py
@@ -282,7 +282,7 @@ def test_grouped_items_first_true(self):
         with open(tmpscenarios.path, 'w') as fd:
             fd.write(scenario)
 
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())[0]
         self.assertEqual(issues[0]['message'], 'foo')
 
@@ -309,7 +309,7 @@ def test_grouped_items_last_true(self):
         with open(tmpscenarios.path, 'w') as fd:
             fd.write(scenario)
 
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())[0]
         self.assertEqual(issues[0]['message'], 'bar')
 
@@ -336,7 +336,7 @@ def test_grouped_items_all_true(self):
         with open(tmpscenarios.path, 'w') as fd:
             fd.write(scenario)
 
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())[0]
         self.assertEqual(issues[0]['message'], 'foo')
 
@@ -362,7 +362,7 @@ def test_grouped_items_all_false(self):
         with open(tmpscenarios.path, 'w') as fd:
             fd.write(scenario)
 
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         self.assertEqual(len(IssuesStore().load()), 0)
 
     @utils.create_data_root({'sos_commands/dpkg/dpkg_-l':
@@ -391,7 +391,7 @@ def test_grouped_items_all_true_mixed_types_apt_first(self):
         with open(tmpscenarios.path, 'w') as fd:
             fd.write(scenario)
 
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())[0]
         self.assertEqual(issues[0]['message'], 'foo')
 
@@ -421,7 +421,7 @@ def test_grouped_items_all_true_mixed_types_snap_first(self):
         with open(tmpscenarios.path, 'w') as fd:
             fd.write(scenario)
 
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())[0]
         self.assertEqual(issues[0]['message'], 'snapd')
diff --git a/tests/unit/test_ycheck_scenarios.py b/tests/unit/test_ycheck_scenarios.py
index a16e6532f..b2062a0e5 100644
--- a/tests/unit/test_ycheck_scenarios.py
+++ b/tests/unit/test_ycheck_scenarios.py
@@ -633,7 +633,7 @@ class TestYamlScenarios(utils.BaseTestCase):
                     format(path=os.path.basename('data.txt')))
     @utils.create_data_root({'data.txt': 'hello x\n'})
     def test_yaml_def_expr_list(self):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())
         self.assertEqual(len(issues[0]), 3)
         i_types = [i['type'] for i in issues[0]]
@@ -649,7 +649,7 @@ def test_yaml_def_expr_list(self):
     @utils.create_data_root({'data.txt': ("blah blah\nit's the start\nblah "
                                           "blah\nit's the end")})
     def test_yaml_def_seq_search(self):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())
         self.assertEqual(len(issues[0]), 1)
         i_types = [i['type'] for i in issues[0]]
@@ -673,7 +673,7 @@ def test_yaml_def_scenario_checks_false(self):
             self.assertFalse(check.result)
 
         # now run the scenarios
-        checker.load_and_run()
+        checker.run(load=False)
 
         self.assertEqual(IssuesManager().load_issues(), {})
 
@@ -701,7 +701,7 @@ def test_yaml_def_scenario_checks_requires(self):
         self.assertEqual(checked, 4)
 
         # now run the scenarios
-        checker.load_and_run()
+        checker.run(load=False)
 
         self.assertEqual(IssuesManager().load_issues(), {})
 
@@ -726,7 +726,7 @@ def test_yaml_def_scenario_checks_expr(self):
             self.assertTrue(check.result)
 
         # now run the scenarios
-        checker.run()
+        checker.run(load=False)
 
         msg = ("log matched 4 times (00:00:00.000, 00:32:00.000, "
                "00:33:00.000, 00:36:00.000)")
@@ -795,7 +795,7 @@ def test_yaml_def_scenario_result_filters_by_period(self):
 
     @init_test_scenario(YDEF_NESTED_LOGIC)
     def test_yaml_def_nested_logic(self):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())[0]
         self.assertEqual(sorted([issue['message'] for issue in issues]),
                          sorted(['conc1', 'conc3']))
@@ -819,7 +819,7 @@ def test_yaml_def_mapped_overrides(self):
     @init_test_scenario(SCENARIO_W_ERROR)
     def test_failed_scenario_caught(self, mock_log1, mock_log2, _mock_log3,
                                     mock_log4, mock_log5, mock_log6):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
 
         # Check caught exception logs
         args = ('failed to import and call property %s',
@@ -856,7 +856,7 @@ def test_failed_scenario_caught(self, mock_log1, mock_log2, _mock_log3,
     @init_test_scenario(CONFIG_SCENARIO)
     @utils.create_data_root({'test.conf': '[DEFAULT]\nkey1 = 101\n'})
     def test_config_scenario_fail(self):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())[0]
         self.assertEqual([issue['message'] for issue in issues],
                          ['cfg is bad', 'cfg is bad2'])
@@ -864,7 +864,7 @@ def test_config_scenario_fail(self):
     @init_test_scenario(CONFIG_SCENARIO)
     @utils.create_data_root({'test.conf': '[DEFAULT]\nkey1 = 102\n'})
     def test_config_scenario_pass(self):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())
         self.assertEqual(len(issues), 0)
 
@@ -875,7 +875,7 @@ def test_config_scenario_pass(self):
     @init_test_scenario(CONCLUSION_W_INVALID_BUG_RAISES)
     def test_raises_w_invalid_types(self, mock_exc, mock_log, mock_log2):
         mock_exc.side_effect = Exception
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
 
         # Check caught exception logs
         args = ('caught exception when running scenario %s:', 'scenarioD')
@@ -901,7 +901,7 @@ def test_raises_w_invalid_types(self, mock_exc, mock_log, mock_log2):
 
     @init_test_scenario(VARS)
     def test_vars(self):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())
         self.assertEqual(len(issues[0]), 4)
         msgs = []
@@ -926,7 +926,7 @@ def test_logical_collection_and_with_fail(self, mock_log1, mock_log2,
                                               _mock_log3, mock_log4,
                                               mock_log5, mock_log6,
                                               _mock_log7):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         expected = [
             (mock_log1,
              ('failed to import and call property %s',
@@ -962,13 +962,13 @@ def test_logical_collection_and_with_fail(self, mock_log1, mock_log2,
 
     @init_test_scenario(NESTED_LOGIC_TEST_NO_ISSUE)
     def test_logical_collection_nested_no_issue(self):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())
         self.assertEqual(len(issues), 0)
 
     @init_test_scenario(NESTED_LOGIC_TEST_W_ISSUE)
     def test_logical_collection_nested_w_issue(self):
-        scenarios.YScenarioChecker().load_and_run()
+        scenarios.YScenarioChecker().run()
         issues = list(IssuesStore().load().values())
         self.assertEqual(len(issues), 1)
 
@@ -1006,7 +1006,7 @@ def reached(self, *args, **kwargs):
         OverrideRegistry.unregister([YPropertyConclusion])
         try:
             OverrideRegistry.register([YPropertyConclusionTest])
-            scenarios.YScenarioChecker().load_and_run()
+            scenarios.YScenarioChecker().run()
         finally:
             OverrideRegistry.unregister([YPropertyConclusionTest])
             OverrideRegistry.register([YPropertyConclusion])
@@ -1027,7 +1027,7 @@ def reached(self, *args, **kwargs):
         OverrideRegistry.unregister([YPropertyConclusion])
         try:
             OverrideRegistry.register([YPropertyConclusionTest])
-            scenarios.YScenarioChecker().load_and_run()
+            scenarios.YScenarioChecker().run()
         finally:
             OverrideRegistry.unregister([YPropertyConclusionTest])
             OverrideRegistry.register([YPropertyConclusion])
diff --git a/tests/unit/utils.py b/tests/unit/utils.py
index 1d3efab74..49158ff9b 100644
--- a/tests/unit/utils.py
+++ b/tests/unit/utils.py
@@ -12,6 +12,7 @@
 from hotsos.core.issues import IssuesManager
 # disable for stestr otherwise output is much too verbose
 from hotsos.core.log import log, logging, LoggingManager
+from hotsos.core.ycheck.events import EventsPreloader
 from hotsos.core.ycheck.scenarios import YScenarioChecker
 
 # Must be set prior to other imports
@@ -177,7 +178,7 @@ def inner(test_inst):
 
             log.debug("running scenario under test")
             try:
-                YScenarioChecker().load_and_run()
+                YScenarioChecker().run()
                 raised_issues = IssuesManager().load_issues()
                 raised_bugs = IssuesManager().load_bugs()
             finally:
@@ -451,6 +452,13 @@ def setUp(self):
         HotSOSConfig.debug_log_levels['searchkit'] = 'WARNING'
         LoggingManager().start(level=logging.WARNING)
 
+        # IMPORTANT: to avoid cross-pollution between tests we reset the event
+        # search registry before each test is run. We only do the reset stage
+        # here and defer the loading of searches and execution of the search
+        # to happen as part of test so that any env changes can be consumed
+        # properly.
+        EventsPreloader.reset()
+
     def _addDuration(self, *args, **kwargs):  # For Python >= 3.12
         """ Python 3.12 needs subclasses of unittest.TestCase to implement
             this in order to record times and execute any cleanup actions once