Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Remove bare "except:" clauses except those that re-raise #351

Merged
merged 7 commits
Oct 23, 2023
2 changes: 1 addition & 1 deletion src/vunnel/providers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,6 @@ def load_plugins() -> None:
try:
logging.debug(f"loading provider plugin {tool.name!r}")
tool.load()
except: # noqa: E722
except Exception:
# note: this should not be fatal. Log and move on.
logging.exception(f"failed loading provider plugin {tool.name!r}")
10 changes: 5 additions & 5 deletions src/vunnel/providers/alpine/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,15 +86,15 @@ def _download(self):
if not os.path.exists(self.secdb_dir_path):
os.makedirs(self.secdb_dir_path, exist_ok=True)

self.logger.info("downloading alpine secdb metadata from: {}".format(self.metadata_url))
self.logger.info(f"downloading alpine secdb metadata from: {self.metadata_url}")
r = self._download_metadata_url()
try:
self.logger.debug("HTML parsing secdb landing page content for links")
parser = SecdbLandingParser()
parser.feed(r.text)
links = parser.links
except:
self.logger.warning("unable to html parse secdb landing page content for links")
except Exception:
self.logger.warning("unable to html parse secdb landing page content for links", exc_info=True)

if not links:
self.logger.debug("string parsing secdb landing page content for links")
Expand Down Expand Up @@ -135,8 +135,8 @@ def _download(self):

except KeyboardInterrupt:
raise
except:
self.logger.exception("ignoring error processing secdb for {}".format(link))
except Exception:
self.logger.exception(f"ignoring error processing secdb for {link}")

@utils.retry_with_backoff()
def _download_metadata_url(self) -> requests.Response:
Expand Down
9 changes: 5 additions & 4 deletions src/vunnel/providers/rhel/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ def _sync_cves(self, skip_if_exists=False, do_full_sync=True): # noqa
last_full_sync = dt_parser.parse(fp.read())
if (now - last_full_sync).days < self.full_sync_interval:
do_full_sync = False
except:
except Exception:
self.logger.debug("ignoring error loading last_full_sync timestamp from disk", exc_info=True)
do_full_sync = True

Expand Down Expand Up @@ -282,7 +282,7 @@ def _fetch_rhsa_fix_version(self, rhsa_id, platform, package):
)
else:
self.logger.debug(f"{rhsa_id} not found for platform {platform}")
except:
except Exception:
self.logger.exception(f"error looking up {package} in {rhsa_id} for {platform}")

return fixed_ver, module_name
Expand Down Expand Up @@ -633,7 +633,7 @@ def _parse_package_state(self, cve_id: str, fixed: list[FixedIn], content) -> li
else:
self.logger.debug(f"{state!r} is an unknown state")
continue
except:
except Exception:
self.logger.exception(f"error parsing {cve_id} package state entity: {item}")

return affected + out_of_support
Expand Down Expand Up @@ -679,7 +679,8 @@ def _parse_cve(self, cve_id, content):
cvssv3.get("cvss3_base_score", None),
cvssv3.get("status", None),
)
except:
except Exception:
self.logger.info("unable to make cvss3, defaulting to None", exc_info=True)
cvssv3_obj = None

for item in nfins: # process not fixed in packages first as that trumps fixes
Expand Down
8 changes: 3 additions & 5 deletions src/vunnel/providers/sles/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -379,17 +379,15 @@ def parse(cls, xml_element, config: OVALParserConfig) -> SLESOVALVulnerability |
identity = xml_element.attrib["id"]

oval_ns_match = re.search(config.namespace_regex, xml_element.tag)
if oval_ns_match and len(oval_ns_match.groups()) > 0:
oval_ns = oval_ns_match.group(1)
else:
oval_ns = ""
oval_ns = oval_ns_match.group(1) if oval_ns_match and len(oval_ns_match.groups()) > 0 else ""

# def_version = xml_element.attrib["version"]
name = xml_element.find(config.title_xpath_query.format(oval_ns)).text
severity_element = xml_element.find(config.severity_xpath_query.format(oval_ns))
try:
severity = config.severity_map.get(severity_element.text.lower())
except:
except Exception:
cls.logger.info("unknown severity due to exception", exc_info=True)
severity = "Unknown"
# TODO temporary hack! sles 15 data was tripping this, figure out a better way
description = xml_element.find(config.description_xpath_query.format(oval_ns)).text.strip()
Expand Down
25 changes: 13 additions & 12 deletions src/vunnel/providers/ubuntu/git.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,9 +82,9 @@ def _check(self, destination):

cmd = self._is_git_repo_cmd_
out = self._exec_cmd(cmd, cwd=destination)
self.logger.debug("check for git repository, cmd: {}, output: {}".format(cmd, out.decode()))
except:
self.logger.debug(f"git working tree not found at {destination}")
self.logger.debug(f"check for git repository, cmd: {cmd}, output: {out.decode()}")
except Exception:
self.logger.debug(f"git working tree not found at {destination}", exc_info=True)
return False

return True
Expand Down Expand Up @@ -146,21 +146,22 @@ def sync_with_upstream(self):
try:
self._exec_cmd(self._clean_cmd_, cwd=self.dest)
self._exec_cmd(self._reset_cmd_, cwd=self.dest)
except:
pass
except Exception:
self.logger.info("failed to clean and reset", exc_info=True)

self._exec_cmd(self._check_out_cmd_.format(branch=self.branch), cwd=self.dest)
except: # nosec
pass
except Exception:
self.logger.info(f"failed to run git checkout of {self.branch}", exc_info=True)

try:
out = self._exec_cmd(self._pull_ff_only_cmd_, cwd=self.dest)
self.logger.debug("synced with upstream git repo, output: {}".format(out.decode()))
self.logger.debug(f"synced with upstream git repo, output: {out.decode()}")
self._write_graph()
except UbuntuGitServer503Error:
raise
except: # nosec
except Exception:
# if something other than 503 occurred at this point just remove the repo and re-clone
self.logger.exception("unexpected exception syncing with upstream; will delete and re-clone")
self._delete_repo()
self._clone_repo()

Expand Down Expand Up @@ -207,14 +208,14 @@ def get_content(self, git_rev: GitRevision) -> list[str]:
cmd = self._get_rev_content_cmd_.format(sha=git_rev.sha, file=git_rev.file)
out = self._exec_cmd(cmd, cwd=self.dest)
return out.decode().splitlines()
except:
self.logger.exception("failed to get content for {} from git commit {}".format(git_rev.file, git_rev.sha))
except Exception:
self.logger.exception(f"failed to get content for {git_rev.file} from git commit {git_rev.sha}")

def get_current_rev(self) -> str:
try:
rev = self._exec_cmd(self._head_rev_cmd_, cwd=self.dest)
return rev.decode().strip() if isinstance(rev, bytes) else rev
except:
except Exception:
self.logger.exception("unable to get current git revision")

@staticmethod
Expand Down
3 changes: 2 additions & 1 deletion src/vunnel/providers/ubuntu/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -496,7 +496,8 @@ def map_parsed(parsed_cve: CVEFile, logger: logging.Logger | None = None):
r = Vulnerability()
try:
r.Severity = getattr(Severity, parsed_cve.priority.capitalize())
except:
except Exception:
logger.exception("setting unknown severity due to exception getting severity")
r.Severity = Severity.Unknown

r.Name = parsed_cve.name
Expand Down
2 changes: 1 addition & 1 deletion src/vunnel/providers/wolfi/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def _download(self):
fp.write(chunk)
else:
r.raise_for_status()
except: # noqa
except Exception:
self.logger.exception(f"ignoring error processing secdb for {self.url}")

def _load(self):
Expand Down
6 changes: 4 additions & 2 deletions src/vunnel/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,19 +22,21 @@ def wrapper(*args: Any, **kwargs: Any) -> Any:
logger = logging.getLogger("utils:retry-with-backoff")
attempt = 0
while attempt < retries:
err = None
try:
return f(*args, **kwargs)
except KeyboardInterrupt:
logger.warning("keyboard interrupt, cancelling request...")
raise
except: # noqa: E722
except Exception as e:
err = e
if attempt >= retries:
logger.exception(f"failed after {retries} retries")
raise

# explanation of S311 disable: random number is not used for cryptography
sleep = backoff_in_seconds * 2**attempt + random.uniform(0, 1) # noqa: S311
logger.warning(f"{f} failed. Retrying in {int(sleep)} seconds (attempt {attempt+1} of {retries})")
logger.warning(f"{f} failed with {err}. Retrying in {int(sleep)} seconds (attempt {attempt+1} of {retries})")
time.sleep(sleep)
attempt += 1

Expand Down
2 changes: 1 addition & 1 deletion src/vunnel/utils/oval_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def parse(dest_file: str, config: Config, vuln_dict: dict | None = None):
elif event == "end" and re.search(config.tag_pattern, element.tag).group(1) == "definition":
try:
_process_definition(element, vuln_dict, config)
except:
except Exception:
logger.exception("Error parsing oval record. Logging error and continuing")
finally:
processing = False
Expand Down
4 changes: 3 additions & 1 deletion src/vunnel/utils/oval_v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,6 +177,7 @@ def _parse_group(criteria_element: ET.Element, config: OVALParserConfig) -> list
</criteria>
"""
results = []
logger = logging.getLogger("oval-v2-parser")

# further parsing makes the assumption that this element has 2 children, bail out of here if that's not true
if len(criteria_element) != 2:
Expand All @@ -190,7 +191,8 @@ def _parse_group(criteria_element: ET.Element, config: OVALParserConfig) -> list

try:
test_ids = VulnerabilityParser._parse_sub_group(criteria_element[1], config, config.artifact_regex)
except:
except Exception:
logger.exception("returning results early due to exception in _parse_sub_group")
return results

if not test_ids:
Expand Down
Loading