
Commit

Merge branch 'main' of github.com:anchore/vunnel into fix/respect-github-rate-limit-headers

willmurphyscode committed Oct 31, 2023
2 parents 11d11e2 + 984b5cb commit 9fc9c9f
Showing 14 changed files with 44 additions and 29 deletions.
5 changes: 1 addition & 4 deletions .github/workflows/release.yaml
@@ -11,6 +11,7 @@ permissions:

jobs:
quality-gate:
+environment: release
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 #v4.1.1
@@ -59,7 +60,6 @@ jobs:
needs:
- quality-gate
runs-on: ubuntu-22.04
-environment: release
permissions:
contents: write
packages: write
@@ -84,7 +84,6 @@ jobs:
needs:
- tag
runs-on: ubuntu-22.04
-environment: release
permissions:
contents: read
steps:
@@ -105,7 +104,6 @@ jobs:
needs:
- tag
runs-on: ubuntu-22.04
-environment: release
permissions:
contents: read
packages: write
@@ -133,7 +131,6 @@ jobs:
needs:
- tag
runs-on: ubuntu-22.04
-environment: release
permissions:
contents: write
packages: write
34 changes: 29 additions & 5 deletions poetry.lock

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion src/vunnel/cli/cli.py
@@ -91,7 +91,6 @@ def cli(ctx: click.core.Context, verbose: bool, config_path: str) -> None:
def show_config(cfg: config.Application) -> None:
logging.info("showing application config")

-# noqa
class IndentDumper(yaml.Dumper):
def increase_indent(self, flow: bool = False, indentless: bool = False) -> None: # noqa: ARG002
return super().increase_indent(flow, False)
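
The IndentDumper subclass kept here (and mirrored in tests/quality/configure.py below) is the usual PyYAML trick for indenting block-sequence items under their parent key; the stock yaml.Dumper emits list items flush with the key. A minimal, self-contained sketch of the effect, using hypothetical data that is not part of this commit:

import yaml

class IndentDumper(yaml.Dumper):
    def increase_indent(self, flow: bool = False, indentless: bool = False) -> None:
        # always indent block sequences, ignoring the "indentless" hint
        return super().increase_indent(flow, False)

data = {"providers": ["amazon", "debian", "github"]}  # hypothetical config fragment
print(yaml.dump(data))                       # list items rendered flush with "providers:"
print(yaml.dump(data, Dumper=IndentDumper))  # list items indented beneath "providers:"
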
2 changes: 1 addition & 1 deletion src/vunnel/providers/amazon/parser.py
@@ -120,7 +120,7 @@ def get_package_name_version(pkg):
if not pkg.endswith(".rpm"):
pkg = pkg + ".rpm"

-name, version, release, epoch, arch = rpm.split_rpm_filename(pkg) # noqa
+name, version, release, epoch, arch = rpm.split_rpm_filename(pkg)

if release:
return AlasFixedIn(pkg=name, ver=(version + "-" + release))
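
For context, get_package_name_version appends a missing ".rpm" suffix and then splits the package string into its NEVRA parts; the bare # noqa removed above was no longer suppressing anything. A rough illustration of the split, assuming the helper lives at vunnel.utils.rpm and follows the usual name-version-release.arch.rpm convention; the filename and expected values are hypothetical:

from vunnel.utils import rpm  # assumed import path for vunnel's RPM filename helper

pkg = "kernel-4.14.177-139.254.amzn2.x86_64.rpm"  # hypothetical ALAS package string
name, version, release, epoch, arch = rpm.split_rpm_filename(pkg)
# expected, roughly: name="kernel", version="4.14.177",
# release="139.254.amzn2", epoch=None, arch="x86_64"
fixed_in = f"{version}-{release}" if release else version  # mirrors the AlasFixedIn logic above
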
5 changes: 2 additions & 3 deletions src/vunnel/providers/debian/parser.py
@@ -128,7 +128,6 @@ def _get_cve_to_dsalist(self, dsa):

return ns_cve_dsalist

-# noqa
def _parse_dsa_record(self, dsa_lines): # noqa: C901
"""
@@ -188,7 +187,7 @@ def _parse_dsa_record(self, dsa_lines): # noqa: C901
continue

return dsa
-except Exception: # noqa
+except Exception:
self.logger.exception("failed to parse dsa record")

def _get_dsa_map(self):
@@ -444,7 +443,7 @@ def _normalize_json(self, ns_cve_dsalist=None): # noqa: PLR0912,PLR0915,C901

# retlists[relno].append(final_record)

-except Exception: # noqa
+except Exception:
self.logger.exception(f"ignoring error parsing vuln: {vid}, pkg: {pkg}, rel: {rel}")

self.logger.debug(f"metrics for advisory information: {json.dumps(adv_mets)}")
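
Most of this commit either drops bare # noqa comments that no longer suppress anything or narrows them to a specific rule code. A bare # noqa hides every finding on its line, while # noqa: <code> silences only the named rule, which is why scoped forms such as C901, PLR0913, and B007 survive. A small illustration of the difference, not code from this repository:

import logging

logger = logging.getLogger(__name__)

def parse_record(raw):
    try:
        return {"id": raw.split(":", 1)[0]}
    # scoped: silences only ruff's blind-except rule (BLE001) on the next line;
    # a bare "# noqa" would hide every finding there, which is what this commit cleans up
    except Exception:  # noqa: BLE001
        logger.exception("failed to parse record")
        return None
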
5 changes: 2 additions & 3 deletions src/vunnel/providers/github/parser.py
@@ -55,7 +55,7 @@ def __init__( # noqa: PLR0913
download_timeout=125,
api_url="https://api.github.com/graphql",
logger=None,
-): # noqa
+):
self.db = db.connection(workspace.input_path, serializer="json")
self.download_timeout = download_timeout
self.api_url = api_url
@@ -253,7 +253,7 @@ def get_advisory(ghsaId, data):
return {}


-def get_vulnerabilities(token, ghsaId, timestamp, vuln_cursor, parent_cursor): # noqa
+def get_vulnerabilities(token, ghsaId, timestamp, vuln_cursor, parent_cursor):
"""
In the improbable case that an Advisory is associated with more than 100
(Github's GraphQL limit) these will need to get fetched until the cursor is
@@ -343,7 +343,6 @@ def needs_subquery(data):
return False


-# noqa
def graphql_advisories(cursor=None, timestamp=None, vuln_cursor=None):
"""
The cursor needs to be the `endCursor` for the last successful query. The
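
The truncated docstrings above describe the pagination contract: graphql_advisories takes the endCursor from the last successful query, and get_vulnerabilities pages further when an advisory carries more than 100 associated vulnerabilities. The standalone sketch below shows the general GraphQL cursor pattern (request a page, read pageInfo.hasNextPage and endCursor, resend with after:); it is a simplified illustration with a trimmed query, not vunnel's implementation:

import requests

API_URL = "https://api.github.com/graphql"

QUERY = """
query($cursor: String) {
  securityAdvisories(orderBy: {field: PUBLISHED_AT, direction: ASC}, first: 100, after: $cursor) {
    nodes { ghsaId publishedAt }
    pageInfo { endCursor hasNextPage }
  }
}
"""

def iter_advisories(token):
    cursor = None
    while True:
        resp = requests.post(
            API_URL,
            json={"query": QUERY, "variables": {"cursor": cursor}},
            headers={"Authorization": f"bearer {token}"},
            timeout=125,
        )
        resp.raise_for_status()
        conn = resp.json()["data"]["securityAdvisories"]
        yield from conn["nodes"]
        if not conn["pageInfo"]["hasNextPage"]:
            break
        cursor = conn["pageInfo"]["endCursor"]  # resume the next request from this cursor
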
4 changes: 2 additions & 2 deletions src/vunnel/providers/oracle/parser.py
@@ -116,7 +116,7 @@ def _is_ksplice_version(cls, version) -> bool:
:param version:
:return:
"""
-epoch, version, release = rpm.split_fullversion(version) # noqa
+epoch, version, release = rpm.split_fullversion(version)
return cls.ksplice_regex.match(release) is not None

def filter(self, vuln_dict: dict) -> dict: # noqa: A003
@@ -135,7 +135,7 @@ def filter(self, vuln_dict: dict) -> dict: # noqa: A003
:param vuln_dict: dict of vulns where key is distro and version and value is the list of vulns for that version
:return:
"""
-for version, vuln in vuln_dict.values(): # noqa
+for version, vuln in vuln_dict.values(): # noqa: B007
fixes = vuln.get("Vulnerability", {}).get("FixedIn", [])
if fixes:
pre_filter_fix_count = len(fixes)
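
The loop above now carries a scoped # noqa: B007, flake8-bugbear's "loop control variable not used within the loop body" rule: version is unpacked but only vuln is read. A tiny illustration of the two usual remedies, suppressing just that rule or renaming the unused variable; the data shape is hypothetical:

vuln_dict = {"ol8": ("8", {"Vulnerability": {"FixedIn": []}})}  # hypothetical shape

# option 1: keep the original name and suppress only B007
for version, vuln in vuln_dict.values():  # noqa: B007
    print(vuln["Vulnerability"]["FixedIn"])

# option 2: prefix the unused variable with an underscore so no suppression is needed
for _version, vuln in vuln_dict.values():
    print(vuln["Vulnerability"]["FixedIn"])
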
2 changes: 1 addition & 1 deletion src/vunnel/providers/rhel/parser.py
@@ -134,7 +134,7 @@ def _process_minimal_cve(self, min_cve_api, do_full_sync, min_cve_dir, full_cve_
raise # raise the original exception

# TODO: ALEX, should skip_if_exists be hooked up here? (currently unused)
-def _sync_cves(self, skip_if_exists=False, do_full_sync=True): # noqa
+def _sync_cves(self, skip_if_exists=False, do_full_sync=True): # noqa: PLR0915, PLR0912, C901
"""
Download minimal or summary cve and compare it to persisted state on disk. If no persisted state is found or a
a change is detected, full cve is downloaded
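
The docstring above summarizes _sync_cves: download the minimal/summary CVE listing, compare each entry to state persisted from the previous run, and download the full CVE document only when an entry is new or changed. A generic sketch of that compare-then-fetch pattern; the file name and helpers are hypothetical, not vunnel's actual layout:

import json
import os

def sync(minimal_cves, fetch_full_cve, state_path="minimal_state.json"):
    # load the baseline recorded by the previous run, if any
    previous = {}
    if os.path.exists(state_path):
        with open(state_path) as f:
            previous = json.load(f)

    for cve_id, summary in minimal_cves.items():
        if previous.get(cve_id) != summary:  # new or changed since the last run
            fetch_full_cve(cve_id)           # the expensive download happens only here

    with open(state_path, "w") as f:
        json.dump(minimal_cves, f)           # persist the new baseline for the next run
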
2 changes: 1 addition & 1 deletion src/vunnel/providers/sles/parser.py
@@ -239,7 +239,7 @@ def _transform_oval_vulnerabilities(cls, major_version: str, parsed_dict: dict)
if not vulnerabilities_dict or not tests_dict or not artifacts_dict or not versions_dict:
return results

-for identity, vulnerability_obj in vulnerabilities_dict.items(): # noqa
+for identity, vulnerability_obj in vulnerabilities_dict.items(): # noqa: B007
# version->release->feed map
version_release_feed = defaultdict()

1 change: 0 additions & 1 deletion src/vunnel/providers/wolfi/parser.py
@@ -79,7 +79,6 @@ def _load(self):
self.logger.exception(f"failed to load {self.namespace} sec db data")
raise

-# noqa
def _normalize(self, release, data):
"""
Normalize all the sec db entries into vulnerability payload records
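
_normalize converts Wolfi security-database entries into vulnerability payload records. A rough sketch of the idea, assuming the Alpine-style secdb layout (a packages list whose entries map fixed versions to CVE ids under secfixes); the output field names are illustrative rather than vunnel's exact schema:

def normalize(release, data):
    records = {}
    for entry in data.get("packages", []):
        pkg = entry.get("pkg", {})
        name = pkg.get("name")
        for fixed_version, cve_ids in pkg.get("secfixes", {}).items():
            for cve_id in cve_ids:
                rec = records.setdefault(cve_id, {"Name": cve_id, "FixedIn": []})
                rec["FixedIn"].append(
                    {"Name": name, "Version": fixed_version, "NamespaceName": f"wolfi:{release}"}
                )
    return records
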
2 changes: 1 addition & 1 deletion src/vunnel/result.py
@@ -195,7 +195,7 @@ def close(self, successful: bool) -> None:


class Writer:
-def __init__( # noqa
+def __init__( # noqa: PLR0913
self,
workspace: Workspace,
result_state_policy: ResultStatePolicy,
1 change: 0 additions & 1 deletion src/vunnel/utils/vulnerability.py
@@ -1,4 +1,3 @@
-# noqa
from __future__ import annotations

from dataclasses import asdict, dataclass, field
1 change: 0 additions & 1 deletion tests/quality/configure.py
@@ -206,7 +206,6 @@ def cli(ctx, verbose: bool, config_path: str):
def show_config(cfg: Config):
logging.info("showing application config")

-# noqa
class IndentDumper(yaml.Dumper):
def increase_indent(self, flow: bool = False, indentless: bool = False) -> None: # noqa: ARG002
return super().increase_indent(flow, False)
8 changes: 4 additions & 4 deletions tests/unit/providers/github/test_github.py
@@ -286,7 +286,7 @@ def test_no_cursor_no_timestamp(self):
assert (
line
== "securityAdvisories(orderBy: {field: PUBLISHED_AT, direction: ASC}, classifications: [GENERAL, MALWARE], first: 100) {"
-) # noqa
+)

def test_no_cursor_with_timestamp_changes_field(self):
# first run after a successful run
@@ -307,7 +307,7 @@ def test_cursor_no_timestamp(self):
assert (
line
== 'securityAdvisories(orderBy: {field: PUBLISHED_AT, direction: ASC}, after: "FXXF==", classifications: [GENERAL, MALWARE], first: 100) {'
-) # noqa
+)

def test_cursor_with_timestamp(self):
# subsequent request after a successful run(s) because a timestamp has
@@ -318,15 +318,15 @@ def test_cursor_with_timestamp(self):
assert (
line
== ', after: "FXXF==", updatedSince: "2019-02-06T20:44:12.371565", classifications: [GENERAL, MALWARE], first: 100) {'
-) # noqa
+)

def test_cursor_with_timestamp_changes_field(self):
# subsequent request after a successful run(s) because a timestamp has
# been recorded
result = parser.graphql_advisories(cursor="FXXF==", timestamp="2019-02-06T20:44:12.371565")
line = result.split("\n")[2].strip()
line = line.split("}")[0]
-assert line == "securityAdvisories(orderBy: {field: UPDATED_AT, direction: ASC" # noqa
+assert line == "securityAdvisories(orderBy: {field: UPDATED_AT, direction: ASC"


class TestNeedsSubquery:
