diff --git a/StdAny/StdAny/.clang-tidy b/.clang-tidy
similarity index 88%
rename from StdAny/StdAny/.clang-tidy
rename to .clang-tidy
index 58a635e..287ef9f 100644
--- a/StdAny/StdAny/.clang-tidy
+++ b/.clang-tidy
@@ -1,6 +1,6 @@
Checks: 'clang-diagnostic-*,clang-analyzer-*,-*,performance-*,bugprone-*,clang-analyzer-*,mpi-*,misc-*,readability-*'
WarningsAsErrors: ''
-HeaderFilterRegex: ''
+HeaderFilterRegex: '.*'
AnalyzeTemporaryDtors: false
FormatStyle: 'file'
CheckOptions:
@@ -184,4 +184,30 @@ CheckOptions:
value: ''
- key: readability-identifier-naming.ClassCase
value: CamelCase
+ - key: readability-identifier-naming.ClassMemberPrefix
+ value: s_
+ - key: readability-identifier-naming.ClassMemberCase
+ value: camelBack
+ - key: readability-identifier-naming.MemberPrefix
+ value: m_
+ - key: readability-identifier-naming.MemberCase
+ value: camelBack
+ - key: readability-identifier-naming.ParameterCase
+ value: lower_case
+ - key: readability-identifier-naming.MethodCase
+ value: camelBack
+ - key: readability-identifier-naming.ConstantParameterCase
+ value: lower_case
+ - key: readability-identifier-naming.EnumCase
+ value: CamelCase
+ - key: readability-identifier-naming.EnumConstantCase
+ value: camelBack
+ - key: readability-identifier-naming.NamespaceCase
+ value: CamelCase
+ - key: readability-identifier-naming.VariableCase
+ value: lower_case
+ - key: readability-identifier-naming.MacroDefinitionCase
+ value: UPPER_CASE
+ - key: readability-identifier-naming.FunctionCase
+ value: camelBack
diff --git a/.github/actions/clang-tidy-review/.dockerignore b/.github/actions/clang-tidy-review/.dockerignore
new file mode 100644
index 0000000..c1c5eef
--- /dev/null
+++ b/.github/actions/clang-tidy-review/.dockerignore
@@ -0,0 +1,2 @@
+**/venv
+**/__pycache__
diff --git a/.github/actions/clang-tidy-review/.gitignore b/.github/actions/clang-tidy-review/.gitignore
new file mode 100644
index 0000000..fe44426
--- /dev/null
+++ b/.github/actions/clang-tidy-review/.gitignore
@@ -0,0 +1,167 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv*/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+# Generated by review action
+clang-tidy-review-output.json
+clang-tidy-review-metadata.json
+
+# Generated by clang-tidy
+clang_tidy_review.yaml
diff --git a/.github/actions/clang-tidy-review/Dockerfile b/.github/actions/clang-tidy-review/Dockerfile
new file mode 100644
index 0000000..81dc1b0
--- /dev/null
+++ b/.github/actions/clang-tidy-review/Dockerfile
@@ -0,0 +1,21 @@
+FROM ubuntu:24.04
+
+RUN apt-get update && \
+    DEBIAN_FRONTEND=noninteractive \
+    apt-get install -y --no-install-recommends \
+ build-essential cmake git \
+ tzdata \
+ clang-tidy-14 \
+ clang-tidy-15 \
+ clang-tidy-16 \
+ clang-tidy-17 \
+ clang-tidy-18 \
+ python3 \
+ python3-pip \
+    && rm -rf /var/lib/apt/lists/*
+
+COPY . /clang_tidy_review/
+
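+# Ubuntu 24.04 marks the system Python as externally managed (PEP 668);
+# --break-system-packages lets pip install into it, which is fine inside a throwaway container.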
+RUN python3 -m pip install --break-system-packages /clang_tidy_review/post/clang_tidy_review
+
+ENTRYPOINT ["review"]
diff --git a/.github/actions/clang-tidy-review/LICENSE b/.github/actions/clang-tidy-review/LICENSE
new file mode 100644
index 0000000..a042b73
--- /dev/null
+++ b/.github/actions/clang-tidy-review/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Peter Hill
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/.github/actions/clang-tidy-review/action.yml b/.github/actions/clang-tidy-review/action.yml
new file mode 100644
index 0000000..df59943
--- /dev/null
+++ b/.github/actions/clang-tidy-review/action.yml
@@ -0,0 +1,95 @@
+name: 'clang-tidy review'
+author: 'Peter Hill'
+description: 'Create a pull request review based on warnings from clang-tidy'
+branding:
+ icon: 'book-open'
+ color: 'red'
+inputs:
+ token:
+ description: 'Authentication token'
+ default: ${{ github.token }}
+ required: false
+ build_dir:
+ description: 'Directory containing the compile_commands.json file'
+ default: '.'
+ required: false
+ base_dir:
+ description: 'Absolute path to initial working directory. Useful if generating `compile_commands.json` outside of the Action'
+ default: ${{ github.workspace }}
+    required: false
+ clang_tidy_version:
+ description: 'Version of clang-tidy to use; one of 14, 15, 16, 17, 18'
+ default: '18'
+ required: false
+ clang_tidy_checks:
+ description: 'List of checks'
+ default: '-*,performance-*,readability-*,bugprone-*,clang-analyzer-*,cppcoreguidelines-*,mpi-*,misc-*'
+ required: false
+ config_file:
+ description: 'Location of .clang-tidy config file. If specified, takes preference over `clang_tidy_checks`'
+ default: ''
+ required: false
+ include:
+ description: 'Comma-separated list of files or patterns to include'
+ default: "*.[ch],*.[ch]xx,*.[ch]pp,*.[ch]++,*.cc,*.hh"
+ required: false
+ exclude:
+ description: 'Comma-separated list of files or patterns to exclude'
+ required: false
+ default: ''
+ apt_packages:
+ description: 'Comma-separated list of apt packages to install'
+ required: false
+ default: ''
+ cmake_command:
+ description: 'If set, run CMake as part of the action using this command'
+ required: false
+ default: ''
+ max_comments:
+ description: 'Maximum number of comments to post at once'
+ required: false
+ default: '25'
+ lgtm_comment_body:
+ description: 'Message to post on PR if no issues are found. An empty string will post no LGTM comment.'
+ required: false
+ default: 'clang-tidy review says "All clean, LGTM! :+1:"'
+ split_workflow:
+ description: "Only generate but don't post the review, leaving it for the second workflow. Relevant when receiving PRs from forks that don't have the required permissions to post reviews."
+ required: false
+ default: false
+ annotations:
+ description: "Use annotations instead of comments. See README for limitations on annotations"
+ required: false
+ default: false
+ parallel:
+ description: "Number of tidy instances to be run in parallel. Zero will automatically determine the right number."
+ required: false
+ default: "0"
+  pr:
+    description: 'Pull request number'
+    default: ${{ github.event.pull_request.number }}
+  repo:
+    description: 'Repository name in the form owner/repo'
+    default: ${{ github.repository }}
+outputs:
+ total_comments:
+ description: 'Total number of warnings from clang-tidy'
+runs:
+ using: 'docker'
+ image: 'Dockerfile'
+ args:
+ - --clang_tidy_binary=clang-tidy-${{ inputs.clang_tidy_version }}
+ - --token=${{ inputs.token }}
+ - --repo=${{ inputs.repo }}
+ - --pr=${{ inputs.pr }}
+ - --build_dir=${{ inputs.build_dir }}
+ - --base_dir=${{ inputs.base_dir }}
+ - --clang_tidy_checks=${{ inputs.clang_tidy_checks }}
+ - --config_file=${{ inputs.config_file }}
+ - --include='${{ inputs.include }}'
+ - --exclude='${{ inputs.exclude }}'
+ - --apt-packages=${{ inputs.apt_packages }}
+ - --cmake-command='${{ inputs.cmake_command }}'
+ - --max-comments=${{ inputs.max_comments }}
+ - --lgtm-comment-body='${{ inputs.lgtm_comment_body }}'
+ - --split_workflow=${{ inputs.split_workflow }}
+ - --annotations=${{ inputs.annotations }}
+ - --parallel=${{ inputs.parallel }}
diff --git a/.github/actions/clang-tidy-review/post/Dockerfile b/.github/actions/clang-tidy-review/post/Dockerfile
new file mode 100644
index 0000000..23371e7
--- /dev/null
+++ b/.github/actions/clang-tidy-review/post/Dockerfile
@@ -0,0 +1,8 @@
+FROM python:3
+
+COPY clang_tidy_review /clang_tidy_review
+
+RUN pip3 install --upgrade pip && \
+ pip3 install /clang_tidy_review
+
+ENTRYPOINT ["post"]
diff --git a/.github/actions/clang-tidy-review/post/README.md b/.github/actions/clang-tidy-review/post/README.md
new file mode 100644
index 0000000..11f8cc9
--- /dev/null
+++ b/.github/actions/clang-tidy-review/post/README.md
@@ -0,0 +1,3 @@
+# Clang-Tidy Review - Post
+
+This is a child-action that only posts the review from the [parent action](../README.md).
diff --git a/.github/actions/clang-tidy-review/post/action.yml b/.github/actions/clang-tidy-review/post/action.yml
new file mode 100644
index 0000000..df2e582
--- /dev/null
+++ b/.github/actions/clang-tidy-review/post/action.yml
@@ -0,0 +1,46 @@
+name: 'clang-tidy review - post comments'
+author: 'Peter Hill'
+description: 'Create a pull request review based on warnings produced by the parent action'
+branding:
+ icon: 'book-open'
+ color: 'red'
+inputs:
+ token:
+ description: 'Authentication token'
+ default: ${{ github.token }}
+ required: false
+  repo:
+    description: 'Repository name in the form owner/repo'
+    default: ${{ github.repository }}
+ max_comments:
+ description: 'Maximum number of comments to post at once'
+ required: false
+ default: '25'
+ lgtm_comment_body:
+ description: 'Message to post on PR if no issues are found. An empty string will post no LGTM comment.'
+ required: false
+ default: 'clang-tidy review says "All clean, LGTM! :+1:"'
+ annotations:
+ description: "Use annotations instead of comments. See README for limitations on annotations"
+ required: false
+ default: false
+ num_comments_as_exitcode:
+    description: "Set the exit code to be the number of comments"
+ required: false
+ default: 'true'
+ workflow_id:
+ description: 'ID of the review workflow'
+ default: ${{ github.event.workflow_run.id }}
+outputs:
+ total_comments:
+ description: 'Total number of warnings from clang-tidy'
+runs:
+ using: 'docker'
+ image: 'Dockerfile'
+ args:
+ - --token=${{ inputs.token }}
+ - --repo=${{ inputs.repo }}
+ - --max-comments=${{ inputs.max_comments }}
+ - --lgtm-comment-body='${{ inputs.lgtm_comment_body }}'
+ - --workflow_id=${{ inputs.workflow_id }}
+ - --annotations=${{ inputs.annotations }}
+ - --num-comments-as-exitcode=${{ inputs.num_comments_as_exitcode }}
diff --git a/.github/actions/clang-tidy-review/post/clang_tidy_review/clang_tidy_review/__init__.py b/.github/actions/clang-tidy-review/post/clang_tidy_review/clang_tidy_review/__init__.py
new file mode 100644
index 0000000..35e6393
--- /dev/null
+++ b/.github/actions/clang-tidy-review/post/clang_tidy_review/clang_tidy_review/__init__.py
@@ -0,0 +1,1419 @@
+# clang-tidy review
+# Copyright (c) 2020 Peter Hill
+# SPDX-License-Identifier: MIT
+# See LICENSE for more information
+
+import argparse
+import base64
+import contextlib
+import datetime
+import fnmatch
+import io
+import itertools
+import json
+import multiprocessing
+import os
+import pathlib
+import pprint
+import queue
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import textwrap
+import threading
+import zipfile
+from operator import itemgetter
+from pathlib import Path
+from typing import Any, Dict, List, Optional, TypedDict
+
+import unidiff
+import urllib3
+import yaml
+from github import Auth, Github
+from github.PaginatedList import PaginatedList
+from github.PullRequest import ReviewComment
+from github.Requester import Requester
+from github.WorkflowRun import WorkflowRun
+
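+# Number of header lines unidiff counts before the first hunk content line; used to
+# convert diff_line_no into the "position" value GitHub expects for review comments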
+DIFF_HEADER_LINE_LENGTH = 5
+FIXES_FILE = Path("clang_tidy_review.yaml")
+METADATA_FILE = Path("clang-tidy-review-metadata.json")
+REVIEW_FILE = Path("clang-tidy-review-output.json")
+PROFILE_DIR = Path("clang-tidy-review-profile")
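+# The GitHub Checks API accepts at most 50 annotations per request; stay well under that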
+MAX_ANNOTATIONS = 10
+
+
+class Metadata(TypedDict):
+ """Loaded from `METADATA_FILE`
+ Contains information necessary to post a review without pull request knowledge
+
+ """
+
+ pr_number: int
+
+
+class PRReview(TypedDict):
+ body: str
+ event: str
+ comments: list[ReviewComment]
+
+
+class HashableComment:
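+    """A review comment that supports hashing, equality and ordering, so that
+    sets can de-duplicate comments across runs and sort them deterministically"""
+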
+ def __init__(self, body: str, line: int, path: str, side: str, **kwargs):
+ self.body = body
+ self.line = line
+ self.path = path
+ self.side = side
+
+ def __hash__(self):
+ return hash(
+ (
+ self.body,
+ self.line,
+ self.path,
+ self.side,
+ )
+ )
+
+ def __eq__(self, other):
+ return (
+ type(self) is type(other)
+ and self.body == other.body
+            and self.line == other.line
+            and self.path == other.path
+ and self.side == other.side
+ )
+
+ def __lt__(self, other):
+ if self.path != other.path:
+ return self.path < other.path
+ if self.line != other.line:
+ return self.line < other.line
+ if self.side != other.side:
+ return self.side < other.side
+ if self.body != other.body:
+ return self.body < other.body
+ return id(self) < id(other)
+
+
+def add_auth_arguments(parser: argparse.ArgumentParser):
+ # Token
+ parser.add_argument("--token", help="github auth token")
+ # App
+ group_app = parser.add_argument_group(
+ """Github app installation authentication
+Permissions required: Contents (Read) and Pull requests (Read and Write)"""
+ )
+ group_app.add_argument("--app-id", type=int, help="app ID")
+ group_app.add_argument(
+ "--private-key", type=str, help="app private key as a string"
+ )
+ group_app.add_argument(
+ "--private-key-base64",
+ type=str,
+ help="app private key as a string encoded as base64",
+ )
+ group_app.add_argument(
+ "--private-key-file-path",
+ type=pathlib.Path,
+        help="app private key .pem file path",
+ )
+ group_app.add_argument("--installation-id", type=int, help="app installation ID")
+
+
+def get_auth_from_arguments(args: argparse.Namespace) -> Auth.Auth:
+ if args.token:
+ return Auth.Token(args.token)
+
+ if (
+ args.app_id
+ and (args.private_key or args.private_key_file_path or args.private_key_base64)
+ and args.installation_id
+ ):
+ if args.private_key:
+ private_key = args.private_key
+ elif args.private_key_base64:
+ private_key = base64.b64decode(args.private_key_base64).decode("ascii")
+ else:
+ private_key = pathlib.Path(args.private_key_file_path).read_text()
+ return Auth.AppAuth(args.app_id, private_key).get_installation_auth(
+ args.installation_id
+ )
+ if (
+ args.app_id
+ or args.private_key
+ or args.private_key_file_path
+ or args.private_key_base64
+ or args.installation_id
+ ):
+ raise argparse.ArgumentError(
+ None,
+ "--app-id, --private-key[-file-path|-base64] and --installation-id must be supplied together",
+ )
+
+ raise argparse.ArgumentError(None, "authentication method not supplied")
+
+
+def build_clang_tidy_warnings(
+ base_invocation: List,
+ env: dict,
+ tmpdir: Path,
+ task_queue: queue.Queue,
+ lock: threading.Lock,
+ failed_files: List,
+) -> None:
+ """Run clang-tidy on the given files and save output into a temporary file"""
+
+ while True:
+ name = task_queue.get()
+ invocation = base_invocation[:]
+
+ # Get a temporary file. We immediately close the handle so clang-tidy can
+ # overwrite it.
+ (handle, fixes_file) = tempfile.mkstemp(suffix=".yaml", dir=tmpdir)
+ os.close(handle)
+ invocation.append(f"--export-fixes={fixes_file}")
+
+ invocation.append(name)
+
+ proc = subprocess.Popen(
+ invocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
+ )
+ output, err = proc.communicate()
+
+ if proc.returncode != 0:
+ if proc.returncode < 0:
+ msg = f"{name}: terminated by signal {-proc.returncode}\n"
+ err += msg.encode("utf-8")
+ failed_files.append(name)
+ with lock:
+ sys.stdout.write(
+ f'{name}: {subprocess.list2cmdline(invocation)}\n{output.decode("utf-8")}'
+ )
+ if len(err) > 0:
+ sys.stdout.flush()
+ sys.stderr.write(err.decode("utf-8"))
+
+ task_queue.task_done()
+
+
+def clang_tidy_version(clang_tidy_binary: pathlib.Path):
+ try:
+ version_out = subprocess.run(
+ [clang_tidy_binary, "--version"],
+ capture_output=True,
+ check=True,
+ text=True,
+ ).stdout
+ except subprocess.CalledProcessError as e:
+ print(f"\n\nWARNING: Couldn't get clang-tidy version, error was: {e}")
+ return 0
+
+ if version := re.search(r"version (\d+)", version_out):
+ return int(version.group(1))
+
+ print(
+ f"\n\nWARNING: Couldn't get clang-tidy version number, '{clang_tidy_binary} --version' reported: {version_out}"
+ )
+ return 0
+
+
+def config_file_or_checks(
+ clang_tidy_binary: pathlib.Path, clang_tidy_checks: str, config_file: str
+) -> Optional[str]:
+ version = clang_tidy_version(clang_tidy_binary)
+
+ if config_file == "":
+ if clang_tidy_checks:
+ return f"--checks={clang_tidy_checks}"
+ return None
+
+ if version >= 12:
+ return f"--config-file={config_file}"
+
+ if config_file != ".clang-tidy":
+        print(
+            f"\n\nWARNING: non-default config file name '{config_file}' will be ignored for "
+            f"selected clang-tidy version {version}. This version expects exactly '.clang-tidy'\n"
+        )
+
+ return "--config"
+
+
+def merge_replacement_files(tmpdir: Path, mergefile: Path):
+ """Merge all replacement files in a directory into a single file"""
+ # The fixes suggested by clang-tidy >= 4.0.0 are given under
+ # the top level key 'Diagnostics' in the output yaml files
+ mergekey = "Diagnostics"
+ merged = []
+ for replacefile in tmpdir.glob("*.yaml"):
+ with replacefile.open() as f:
+ content = yaml.safe_load(f)
+ if not content:
+ continue # Skip empty files.
+ merged.extend(content.get(mergekey, []))
+
+ if merged:
+ # MainSourceFile: The key is required by the definition inside
+ # include/clang/Tooling/ReplacementsYaml.h, but the value
+ # is actually never used inside clang-apply-replacements,
+ # so we set it to '' here.
+ output = {"MainSourceFile": "", mergekey: merged}
+ with mergefile.open("w") as out:
+ yaml.safe_dump(output, out)
+
+
+def load_clang_tidy_warnings(fixes_file: Path) -> Dict:
+ """Read clang-tidy warnings from fixes_file. Can be produced by build_clang_tidy_warnings"""
+ try:
+ with fixes_file.open() as f:
+ return yaml.safe_load(f)
+ except FileNotFoundError:
+ return {}
+
+
+class PullRequest:
+ """Add some convenience functions not in PyGithub"""
+
+ def __init__(self, repo: str, pr_number: Optional[int], auth: Auth.Auth) -> None:
+ self.repo_name = repo
+ self.pr_number = pr_number
+ self.auth = auth
+
+ # Choose API URL, default to public GitHub
+ self.api_url = os.environ.get("GITHUB_API_URL", "https://api.github.com")
+
+ github = Github(auth=self.auth, base_url=self.api_url)
+ self.repo = github.get_repo(f"{repo}")
+ self._pull_request = None
+
+ @property
+ def token(self):
+ return self.auth.token
+
+ @property
+ def pull_request(self):
+ if self._pull_request is None:
+ if self.pr_number is None:
+ raise RuntimeError("Missing PR number")
+
+ self._pull_request = self.repo.get_pull(int(self.pr_number))
+ return self._pull_request
+
+ @property
+ def head_sha(self):
+ if self._pull_request is None:
+ raise RuntimeError("Missing PR")
+
+ return self._pull_request.get_commits().reversed[0].sha
+
+ def get_pr_diff(self) -> List[unidiff.PatchedFile]:
+ """Download the PR diff, return a list of PatchedFile"""
+
+ _, data = self.repo._requester.requestJsonAndCheck(
+ "GET",
+ self.pull_request.url,
+            headers={"Accept": "application/vnd.github.v3.diff"},
+ )
+ if not data:
+ return []
+
+ diffs = data["data"]
+
+ # PatchSet is the easiest way to construct what we want, but the
+ # diff_line_no property on lines is counted from the top of the
+ # whole PatchSet, whereas GitHub is expecting the "position"
+ # property to be line count within each file's diff. So we need to
+ # do this little bit of faff to get a list of file-diffs with
+ # their own diff_line_no range
+ return [unidiff.PatchSet(str(file))[0] for file in unidiff.PatchSet(diffs)]
+
+ def get_pr_author(self) -> str:
+ """Get the username of the PR author. This is used in google-readability-todo"""
+ return self.pull_request.user.login
+
+ def get_pr_comments(self):
+ """Download the PR review comments using the comfort-fade preview headers"""
+
+ def get_element(
+ requester: Requester, headers: dict, element: dict, completed: bool
+ ):
+ return element
+
+ return PaginatedList(
+ get_element,
+ self.pull_request._requester,
+ self.pull_request.review_comments_url,
+ None,
+ )
+
+ def post_lgtm_comment(self, body: str):
+ """Post a "LGTM" comment if everything's clean, making sure not to spam"""
+
+ if not body:
+ return
+
+ comments = self.get_pr_comments()
+
+ for comment in comments:
+ if comment["body"] == body:
+ print("Already posted, no need to update")
+ return
+
+ self.pull_request.create_issue_comment(body)
+
+ def post_review(self, review: PRReview):
+ """Submit a completed review"""
+ self.pull_request.create_review(**review)
+
+ def post_annotations(self, review):
+ headers = {
+ "Accept": "application/vnd.github+json",
+ "Authorization": f"Bearer {self.token}",
+ }
+ url = f"{self.api_url}/repos/{self.repo_name}/check-runs"
+
+ self.repo._requester.requestJsonAndCheck(
+ "POST", url, parameters=review, headers=headers
+ )
+
+
+@contextlib.contextmanager
+def message_group(title: str):
+ print(f"::group::{title}", flush=True)
+ try:
+ yield
+ finally:
+ print("::endgroup::", flush=True)
+
+
+def make_file_line_lookup(diff):
+ """Get a lookup table for each file in diff, to convert between source
+ line number to line number in the diff
+
+ """
+ lookup = {}
+ for file in diff:
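+        # target_file is prefixed with "b/"; strip it to get the repo-relative path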
+ filename = file.target_file[2:]
+ lookup[filename] = {}
+ for hunk in file:
+ for line in hunk:
+ if line.diff_line_no is None:
+ continue
+ if not line.is_removed:
+ lookup[filename][line.target_line_no] = (
+ line.diff_line_no - DIFF_HEADER_LINE_LENGTH
+ )
+ return lookup
+
+
+def make_file_offset_lookup(filenames):
+ """Create a lookup table to convert between character offset and line
+ number for the list of files in `filenames`.
+
+ This is a dict of the cumulative sum of the line lengths for each file.
+
+ """
+ lookup = {}
+
+ for filename in filenames:
+ with Path(filename).open() as file:
+ lines = file.readlines()
+ # Length of each line
+ line_lengths = map(len, lines)
+ # Cumulative sum of line lengths => offset at end of each line
+ lookup[Path(filename).resolve().as_posix()] = [
+ 0,
+ *list(itertools.accumulate(line_lengths)),
+ ]
+
+ return lookup
+
+
+def get_diagnostic_file_path(clang_tidy_diagnostic, build_dir):
+ # Sometimes, clang-tidy gives us an absolute path, so everything is fine.
+    # Sometimes, however, it gives us a path relative to the
+ # build directory, so we prepend that.
+
+ # Modern clang-tidy
+ if ("DiagnosticMessage" in clang_tidy_diagnostic) and (
+ "FilePath" in clang_tidy_diagnostic["DiagnosticMessage"]
+ ):
+ file_path = clang_tidy_diagnostic["DiagnosticMessage"]["FilePath"]
+ if file_path == "":
+ return ""
+ file_path = Path(file_path)
+ if file_path.is_absolute():
+ return os.path.normpath(file_path.resolve())
+ if "BuildDirectory" in clang_tidy_diagnostic:
+ return os.path.normpath(
+ (Path(clang_tidy_diagnostic["BuildDirectory"]) / file_path).resolve()
+ )
+ return os.path.normpath(file_path.resolve())
+
+ # Pre-clang-tidy-9 format
+ if "FilePath" in clang_tidy_diagnostic:
+ file_path = clang_tidy_diagnostic["FilePath"]
+ if file_path == "":
+ return ""
+ return os.path.normpath((Path(build_dir) / file_path).resolve())
+
+ return ""
+
+
+def find_line_number_from_offset(offset_lookup, filename, offset):
+ """Work out which line number `offset` corresponds to using `offset_lookup`.
+
+ The line number (0-indexed) is the index of the first line offset
+ which is larger than `offset`.
+
+ """
+ name = str(pathlib.Path(filename).resolve().absolute())
+
+ if name not in offset_lookup:
+        # Let's make sure we have the file offsets for this other file
+ offset_lookup.update(make_file_offset_lookup([name]))
+
+ for line_num, line_offset in enumerate(offset_lookup[name]):
+ if line_offset > offset:
+ return line_num - 1
+ return -1
+
+
+def read_one_line(filename, line_offset):
+ """Read a single line from a source file"""
+ # Could cache the files instead of opening them each time?
+ with Path(filename).open() as file:
+ file.seek(line_offset)
+ return file.readline().rstrip("\n")
+
+
+def collate_replacement_sets(diagnostic, offset_lookup):
+ """Return a dict of replacements on the same or consecutive lines, indexed by line number
+
+ We need this as we have to apply all the replacements on one line at the same time
+
+    This could break if there are replacements with the same line
+ number but in different files.
+
+ """
+
+ # First, make sure each replacement contains "LineNumber", and
+ # "EndLineNumber" in case it spans multiple lines
+ for replacement in diagnostic["Replacements"]:
+        # Sometimes, the FilePath may include ".." or "." as a path component
+ # However, file paths are stored in the offset table only after being
+ # converted to an abs path, in which case the stored path will differ
+ # from the FilePath and we'll end up looking for a path that's not in
+ # the lookup dict
+ # To fix this, we'll convert all the FilePaths to absolute paths
+ replacement["FilePath"] = Path(replacement["FilePath"]).resolve().as_posix()
+
+ # It's possible the replacement is needed in another file?
+ # Not really sure how that could come about, but let's
+ # cover our behinds in case it does happen:
+ if replacement["FilePath"] not in offset_lookup:
+            # Let's make sure we have the file offsets for this other file
+ offset_lookup.update(make_file_offset_lookup([replacement["FilePath"]]))
+
+ replacement["LineNumber"] = find_line_number_from_offset(
+ offset_lookup, replacement["FilePath"], replacement["Offset"]
+ )
+ replacement["EndLineNumber"] = find_line_number_from_offset(
+ offset_lookup,
+ replacement["FilePath"],
+ replacement["Offset"] + replacement["Length"],
+ )
+
+ # Now we can group them into consecutive lines
+ groups = []
+ for index, replacement in enumerate(diagnostic["Replacements"]):
+ if index == 0:
+ # First one starts a new group, always
+ groups.append([replacement])
+ elif (
+ replacement["LineNumber"] == groups[-1][-1]["LineNumber"]
+ or replacement["LineNumber"] - 1 == groups[-1][-1]["LineNumber"]
+ ):
+ # Same or adjacent line to the last line in the last group
+ # goes in the same group
+ groups[-1].append(replacement)
+ else:
+ # Otherwise, start a new group
+ groups.append([replacement])
+
+ # Turn the list into a dict
+ return {g[0]["LineNumber"]: g for g in groups}
+
+
+def replace_one_line(replacement_set, line_num, offset_lookup):
+ """Apply all the replacements in replacement_set at the same time"""
+
+ filename = replacement_set[0]["FilePath"]
+ # File offset at the start of the first line
+ line_offset = offset_lookup[filename][line_num]
+
+ # List of (start, end) offsets from line_offset
+ insert_offsets: list[tuple[Optional[int], Optional[int]]] = [(0, 0)]
+ # Read all the source lines into a dict so we only get one copy of
+    # each line, though we might read the same line multiple times
+ source_lines = {}
+ for replacement in replacement_set:
+ start = replacement["Offset"] - line_offset
+ end = start + replacement["Length"]
+ insert_offsets.append((start, end))
+
+ # Make sure to read any extra lines we need too
+ for replacement_line_num in range(
+ replacement["LineNumber"], replacement["EndLineNumber"] + 1
+ ):
+ replacement_line_offset = offset_lookup[filename][replacement_line_num]
+ source_lines[replacement_line_num] = (
+ read_one_line(filename, replacement_line_offset) + "\n"
+ )
+
+ # Replacements might cross multiple lines, so squash them all together
+ source_line = "".join(source_lines.values()).rstrip("\n")
+
+ insert_offsets.append((None, None))
+
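+    # Each fragment is the untouched text between the end of one replacement and the start of the next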
+ fragments = []
+ for (_, start), (end, _) in zip(insert_offsets[:-1], insert_offsets[1:]):
+ fragments.append(source_line[start:end])
+
+ new_line = ""
+ for fragment, replacement in zip(fragments, replacement_set):
+ new_line += fragment + replacement["ReplacementText"]
+
+ return source_line, new_line + fragments[-1]
+
+
+def format_ordinary_line(source_line, line_offset):
+ """Format a single C++ line with a diagnostic indicator"""
+
+ return textwrap.dedent(
+ f"""\
+ ```cpp
+ {source_line}
+ {line_offset * " " + "^"}
+ ```
+ """
+ )
+
+
+def format_diff_line(diagnostic, offset_lookup, source_line, line_offset, line_num):
+ """Format a replacement as a Github suggestion or diff block"""
+
+ end_line = line_num
+
+ # We're going to be appending to this
+ code_blocks = ""
+
+ replacement_sets = collate_replacement_sets(diagnostic, offset_lookup)
+
+ for replacement_line_num, replacement_set in replacement_sets.items():
+ old_line, new_line = replace_one_line(
+ replacement_set, replacement_line_num, offset_lookup
+ )
+
+ print(f"----------\n{old_line=}\n{new_line=}\n----------")
+
+ # If the replacement is for the same line as the
+ # diagnostic (which is where the comment will be), then
+ # format the replacement as a suggestion. Otherwise,
+ # format it as a diff
+ if replacement_line_num == line_num:
+ code_blocks += f"""
+```suggestion
+{new_line}
+```
+"""
+ end_line = replacement_set[-1]["EndLineNumber"]
+ else:
+ # Prepend each line in the replacement line with "+ "
+ # in order to make a nice diff block. The extra
+ # whitespace is so the multiline dedent-ed block below
+ # doesn't come out weird.
+ whitespace = "\n "
+ new_line = whitespace.join([f"+ {line}" for line in new_line.splitlines()])
+ old_line = whitespace.join([f"- {line}" for line in old_line.splitlines()])
+
+ rel_path = try_relative(replacement_set[0]["FilePath"]).as_posix()
+ code_blocks += textwrap.dedent(
+ f"""\
+
+ {rel_path}:{replacement_line_num}:
+ ```diff
+ {old_line}
+ {new_line}
+ ```
+ """
+ )
+ return code_blocks, end_line
+
+
+def try_relative(path) -> pathlib.Path:
+ """Try making `path` relative to current directory, otherwise make it an absolute path"""
+ try:
+ here = pathlib.Path.cwd()
+ return pathlib.Path(path).relative_to(here)
+ except ValueError:
+ return pathlib.Path(path).resolve()
+
+
+def fix_absolute_paths(build_compile_commands, base_dir):
+ """Update absolute paths in compile_commands.json to new location, if
+ compile_commands.json was created outside the Actions container
+ """
+
+ basedir = pathlib.Path(base_dir).resolve()
+ newbasedir = Path.cwd()
+
+ if basedir == newbasedir:
+ return
+
+ print(f"Found '{build_compile_commands}', updating absolute paths")
+ # We might need to change some absolute paths if we're inside
+ # a docker container
+ with Path(build_compile_commands).open() as f:
+ compile_commands = json.load(f)
+
+ print(f"Replacing '{basedir}' with '{newbasedir}'", flush=True)
+
+ modified_compile_commands = json.dumps(compile_commands).replace(
+ str(basedir), str(newbasedir)
+ )
+
+ with Path(build_compile_commands).open("w") as f:
+ f.write(modified_compile_commands)
+
+
+def format_notes(notes, offset_lookup):
+ """Format an array of notes into a single string"""
+
+ code_blocks = ""
+
+ for note in notes:
+ filename = note["FilePath"]
+
+ if filename == "":
+ return note["Message"]
+
+ resolved_path = str(pathlib.Path(filename).resolve().absolute())
+
+ line_num = find_line_number_from_offset(
+ offset_lookup, resolved_path, note["FileOffset"]
+ )
+ line_offset = note["FileOffset"] - offset_lookup[resolved_path][line_num]
+ source_line = read_one_line(
+ resolved_path, offset_lookup[resolved_path][line_num]
+ )
+
+ path = try_relative(resolved_path)
+ message = f"**{path.as_posix()}:{line_num}:** {note['Message']}"
+ code = format_ordinary_line(source_line, line_offset)
+ code_blocks += f"{message}\n{code}"
+
+ if notes:
+        code_blocks = f"\n<details>\n<summary>Additional context</summary>\n\n{code_blocks}\n</details>\n"
+
+ return code_blocks
+
+
+def make_comment_from_diagnostic(
+ diagnostic_name, diagnostic, filename, offset_lookup, notes
+):
+ """Create a comment from a diagnostic
+
+ Comment contains the diagnostic message, plus its name, along with
+ code block(s) containing either the exact location of the
+ diagnostic, or suggested fix(es).
+
+ """
+
+ line_num = find_line_number_from_offset(
+ offset_lookup, filename, diagnostic["FileOffset"]
+ )
+ line_offset = diagnostic["FileOffset"] - offset_lookup[filename][line_num]
+
+ source_line = read_one_line(filename, offset_lookup[filename][line_num])
+ end_line = line_num
+
+ print(
+ f"""{diagnostic}
+ {line_num=}; {line_offset=}; {source_line=}
+ """
+ )
+
+ if diagnostic["Replacements"]:
+ code_blocks, end_line = format_diff_line(
+ diagnostic, offset_lookup, source_line, line_offset, line_num
+ )
+ else:
+ # No fixit, so just point at the problem
+ code_blocks = format_ordinary_line(source_line, line_offset)
+
+ code_blocks += format_notes(notes, offset_lookup)
+
+ comment_body = (
+ f"warning: {diagnostic['Message']} [{diagnostic_name}]\n{code_blocks}"
+ )
+
+ return comment_body, end_line + 1
+
+
+def create_review_file(
+ clang_tidy_warnings, diff_lookup, offset_lookup, build_dir
+) -> Optional[PRReview]:
+ """Create a Github review from a set of clang-tidy diagnostics"""
+
+ if "Diagnostics" not in clang_tidy_warnings:
+ return None
+
+ comments: List[ReviewComment] = []
+
+ for diagnostic in clang_tidy_warnings["Diagnostics"]:
+ try:
+ diagnostic_message = diagnostic["DiagnosticMessage"]
+ except KeyError:
+ # Pre-clang-tidy-9 format
+ diagnostic_message = diagnostic
+
+ if diagnostic_message["FilePath"] == "":
+ continue
+
+ comment_body, end_line = make_comment_from_diagnostic(
+ diagnostic["DiagnosticName"],
+ diagnostic_message,
+ get_diagnostic_file_path(diagnostic, build_dir),
+ offset_lookup,
+ notes=diagnostic.get("Notes", []),
+ )
+
+ rel_path = try_relative(
+ get_diagnostic_file_path(diagnostic, build_dir)
+ ).as_posix()
+ # diff lines are 1-indexed
+ source_line = 1 + find_line_number_from_offset(
+ offset_lookup,
+ get_diagnostic_file_path(diagnostic, build_dir),
+ diagnostic_message["FileOffset"],
+ )
+
+ if rel_path not in diff_lookup or end_line not in diff_lookup[rel_path]:
+ print(
+ f"WARNING: Skipping comment for file '{rel_path}' not in PR changeset. Comment body is:\n{comment_body}"
+ )
+ continue
+
+ comments.append(
+ {
+ "path": rel_path,
+ "body": comment_body,
+ "side": "RIGHT",
+ "line": end_line,
+ }
+ )
+ # If this is a multiline comment, we need a couple more bits:
+ if end_line != source_line:
+ comments[-1].update(
+ {
+ "start_side": "RIGHT",
+ "start_line": source_line,
+ }
+ )
+
+ review: PRReview = {
+ "body": "clang-tidy made some suggestions",
+ "event": "COMMENT",
+ "comments": comments,
+ }
+ return review
+
+
+def make_timing_summary(
+ clang_tidy_profiling: Dict, real_time: datetime.timedelta, sha: Optional[str] = None
+) -> str:
+ if not clang_tidy_profiling:
+ return ""
+ top_amount = 10
+ wall_key = "time.clang-tidy.total.wall"
+ user_key = "time.clang-tidy.total.user"
+ sys_key = "time.clang-tidy.total.sys"
+ total_wall = sum(timings[wall_key] for timings in clang_tidy_profiling.values())
+ total_user = sum(timings[user_key] for timings in clang_tidy_profiling.values())
+ total_sys = sum(timings[sys_key] for timings in clang_tidy_profiling.values())
+    print(f"Took: {total_user:.2f}s user {total_sys:.2f}s system {total_wall:.2f} total")
+ file_summary = textwrap.dedent(
+ f"""\
+ ### Top {top_amount} files
+ | File | user (s) | system (s) | total (s) |
+ | ----- | ---------------- | --------------- | ---------------- |
+ | Total | {total_user:.2f} | {total_sys:.2f} | {total_wall:.2f} |
+ """
+ )
+ topfiles = sorted(
+ (
+ (
+ os.path.relpath(file),
+ timings[user_key],
+ timings[sys_key],
+ timings[wall_key],
+ )
+ for file, timings in clang_tidy_profiling.items()
+ ),
+ key=lambda x: x[3],
+ reverse=True,
+ )
+
+ if "GITHUB_SERVER_URL" in os.environ and "GITHUB_REPOSITORY" in os.environ:
+ blob = f"{os.environ['GITHUB_SERVER_URL']}/{os.environ['GITHUB_REPOSITORY']}/blob/{sha}"
+ else:
+ blob = None
+ for f, u, s, w in list(topfiles)[:top_amount]:
+ if blob is not None:
+ f = f"[{f}]({blob}/{f})"
+ file_summary += f"|{f}|{u:.2f}|{s:.2f}|{w:.2f}|\n"
+
+ check_timings = {}
+ for timings in clang_tidy_profiling.values():
+ for check, timing in timings.items():
+ if check in [wall_key, user_key, sys_key]:
+ continue
+ base_check, time_type = check.rsplit(".", 1)
+ check_name = base_check.split(".", 2)[2]
+ t = check_timings.get(check_name, (0.0, 0.0, 0.0))
+ if time_type == "user":
+ t = t[0] + timing, t[1], t[2]
+ elif time_type == "sys":
+ t = t[0], t[1] + timing, t[2]
+ elif time_type == "wall":
+ t = t[0], t[1], t[2] + timing
+ check_timings[check_name] = t
+
+ check_summary = ""
+ if check_timings:
+ check_summary = textwrap.dedent(
+ f"""\
+ ### Top {top_amount} checks
+ | Check | user (s) | system (s) | total (s) |
+ | ----- | -------- | ---------- | --------- |
+ | Total | {total_user:.2f} | {total_sys:.2f} | {total_wall:.2f} |
+ """
+ )
+ topchecks = sorted(
+ ((check_name, *timings) for check_name, timings in check_timings.items()),
+ key=lambda x: x[3],
+ reverse=True,
+ )
+ for c, u, s, w in list(topchecks)[:top_amount]:
+ c = decorate_check_names(f"[{c}]").replace("[[", "[").rstrip("]")
+ check_summary += f"|{c}|{u:.2f}|{s:.2f}|{w:.2f}|\n"
+
+ return (
+        f"## Timing\nReal time: {real_time.total_seconds():.2f}s\n{file_summary}{check_summary}"
+ )
+
+
+def filter_files(diff, include: List[str], exclude: List[str]) -> List:
+ changed_files = [filename.target_file[2:] for filename in diff]
+ files = []
+ for pattern in include:
+ files.extend(fnmatch.filter(changed_files, pattern))
+ print(f"include: {pattern}, file list now: {files}")
+ for pattern in exclude:
+ files = [f for f in files if not fnmatch.fnmatch(f, pattern)]
+ print(f"exclude: {pattern}, file list now: {files}")
+
+ return files
+
+
+def create_review(
+ pull_request: PullRequest,
+ build_dir: str,
+ clang_tidy_checks: str,
+ clang_tidy_binary: pathlib.Path,
+ config_file: str,
+ max_task: int,
+ include: List[str],
+ exclude: List[str],
+) -> Optional[PRReview]:
+ """Given the parameters, runs clang-tidy and creates a review.
+ If no files were changed, or no warnings could be found, None will be returned.
+
+ """
+
+ if max_task == 0:
+ max_task = multiprocessing.cpu_count()
+
+ diff = pull_request.get_pr_diff()
+ print(f"\nDiff from GitHub PR:\n{diff}\n")
+
+ files = filter_files(diff, include, exclude)
+
+ if files == []:
+ with message_group("No files to check!"), REVIEW_FILE.open("w") as review_file:
+ json.dump(
+ {
+ "body": "clang-tidy found no files to check",
+ "event": "COMMENT",
+ "comments": [],
+ },
+ review_file,
+ )
+ return None
+
+ print(f"Checking these files: {files}", flush=True)
+
+ line_ranges = get_line_ranges(diff, files)
+ if line_ranges == "[]":
+ with message_group("No lines added in this PR!"), REVIEW_FILE.open(
+ "w"
+ ) as review_file:
+ json.dump(
+ {
+ "body": "clang-tidy found no lines added",
+ "event": "COMMENT",
+ "comments": [],
+ },
+ review_file,
+ )
+ return None
+
+ print(f"Line filter for clang-tidy:\n{line_ranges}\n")
+
+ username = pull_request.get_pr_author() or "your name here"
+
+ # Run clang-tidy with the configured parameters and produce the CLANG_TIDY_FIXES file
+ export_fixes_dir = Path(tempfile.mkdtemp())
+ env = dict(os.environ, USER=username)
+ config = config_file_or_checks(clang_tidy_binary, clang_tidy_checks, config_file)
+ base_invocation = [
+ clang_tidy_binary,
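+        # NOTE: the compile-commands lookup (-p) is disabled below in favour of a hardcoded include path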
+        # f"-p={build_dir}",
+ "--extra-arg-before=-I../..",
+ f"-line-filter={line_ranges}",
+ "--enable-check-profile",
+ f"-store-check-profile={PROFILE_DIR}",
+ ]
+ if config:
+ print(f"Using config: {config}")
+ base_invocation.append(config)
+ else:
+ print("Using recursive directory config")
+
+ print(f"Spawning a task queue with {max_task} processes")
+ start = datetime.datetime.now()
+ try:
+ # Spin up a bunch of tidy-launching threads.
+ task_queue = queue.Queue(max_task)
+ # List of files with a non-zero return code.
+ failed_files = []
+ lock = threading.Lock()
+ for _ in range(max_task):
+ t = threading.Thread(
+ target=build_clang_tidy_warnings,
+ args=(
+ base_invocation,
+ env,
+ export_fixes_dir,
+ task_queue,
+ lock,
+ failed_files,
+ ),
+ )
+ t.daemon = True
+ t.start()
+
+ # Fill the queue with files.
+ for name in files:
+ task_queue.put(name)
+
+ # Wait for all threads to be done.
+ task_queue.join()
+
+ except KeyboardInterrupt:
+ # This is a sad hack. Unfortunately subprocess goes
+ # bonkers with ctrl-c and we start forking merrily.
+ print("\nCtrl-C detected, goodbye.")
+ os.kill(0, 9)
+ raise
+ real_duration = datetime.datetime.now() - start
+
+ # Read and parse the CLANG_TIDY_FIXES file
+ print(f"Writing fixes to {FIXES_FILE} ...")
+ merge_replacement_files(export_fixes_dir, FIXES_FILE)
+ shutil.rmtree(export_fixes_dir)
+ clang_tidy_warnings = load_clang_tidy_warnings(FIXES_FILE)
+
+ # Read and parse the timing data
+ clang_tidy_profiling = load_and_merge_profiling()
+
+ try:
+ sha = pull_request.head_sha
+ except Exception:
+ sha = os.environ.get("GITHUB_SHA")
+
+ # Post to the action job summary
+ step_summary = make_timing_summary(clang_tidy_profiling, real_duration, sha)
+ set_summary(step_summary)
+
+ print("clang-tidy had the following warnings:\n", clang_tidy_warnings, flush=True)
+
+ diff_lookup = make_file_line_lookup(diff)
+ offset_lookup = make_file_offset_lookup(files)
+
+ with message_group("Creating review from warnings"):
+ review = create_review_file(
+ clang_tidy_warnings, diff_lookup, offset_lookup, build_dir
+ )
+ with REVIEW_FILE.open("w") as review_file:
+ json.dump(review, review_file)
+
+ return review
+
+
+def download_artifacts(pull: PullRequest, workflow_id: int):
+ """Attempt to automatically download the artifacts from a previous
+ run of the review Action"""
+
+ # workflow id is an input: ${{github.event.workflow_run.id }}
+ workflow: WorkflowRun = pull.repo.get_workflow_run(workflow_id)
+ # I don't understand why mypy complains about the next line!
+ for artifact in workflow.get_artifacts():
+ if artifact.name == "clang-tidy-review":
+ break
+ else:
+ # Didn't find the artefact, so bail
+ print(
+ f"Couldn't find 'clang-tidy-review' artifact for workflow '{workflow_id}'. "
+ f"Available artifacts are: {list(workflow.get_artifacts())}"
+ )
+ return None, None
+
+ headers = {
+ "Accept": "application/vnd.github+json",
+ "Authorization": f"Bearer {pull.token}",
+ }
+ r = urllib3.request("GET", artifact.archive_download_url, headers=headers)
+ if r.status != 200:
+ print(
+ f"WARNING: Couldn't automatically download artifacts for workflow '{workflow_id}', response was: {r}: {r.reason}"
+ )
+ return None, None
+
+ data = zipfile.ZipFile(io.BytesIO(r.data))
+ filenames = data.namelist()
+
+ metadata = (
+ json.loads(data.read(str(METADATA_FILE)))
+ if str(METADATA_FILE) in filenames
+ else None
+ )
+ review = (
+ json.loads(data.read(str(REVIEW_FILE)))
+ if str(REVIEW_FILE) in filenames
+ else None
+ )
+ return metadata, review
+
+
+def load_metadata() -> Optional[Metadata]:
+ """Load metadata from the METADATA_FILE path"""
+
+ if not METADATA_FILE.exists():
+ print(f"WARNING: Could not find metadata file ('{METADATA_FILE}')", flush=True)
+ return None
+
+ with METADATA_FILE.open() as metadata_file:
+ return json.load(metadata_file)
+
+
+def save_metadata(pr_number: int) -> None:
+ """Save metadata to the METADATA_FILE path"""
+
+ metadata: Metadata = {"pr_number": pr_number}
+
+ with METADATA_FILE.open("w") as metadata_file:
+ json.dump(metadata, metadata_file)
+
+
+def load_review(review_file: pathlib.Path) -> Optional[PRReview]:
+ """Load review output"""
+
+ if not review_file.exists():
+ print(f"WARNING: Could not find review file ('{review_file}')", flush=True)
+ return None
+
+ with review_file.open() as review_file_handle:
+ payload = json.load(review_file_handle)
+ return payload or None
+
+
+def load_and_merge_profiling() -> Dict:
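+    """Merge clang-tidy's per-file profile output from PROFILE_DIR,
+    accumulating per-check timings and per-file wall/user/sys totals"""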
+ result = {}
+ for profile_file in PROFILE_DIR.glob("*.json"):
+        with profile_file.open() as f:
+            profile_dict = json.load(f)
+ filename = profile_dict["file"]
+ current_profile = result.get(filename, {})
+ for check, timing in profile_dict["profile"].items():
+ current_profile[check] = current_profile.get(check, 0.0) + timing
+ result[filename] = current_profile
+ for filename, timings in list(result.items()):
+ timings["time.clang-tidy.total.wall"] = sum(
+ v for k, v in timings.items() if k.endswith("wall")
+ )
+ timings["time.clang-tidy.total.user"] = sum(
+ v for k, v in timings.items() if k.endswith("user")
+ )
+ timings["time.clang-tidy.total.sys"] = sum(
+ v for k, v in timings.items() if k.endswith("sys")
+ )
+ result[filename] = timings
+ return result
+
+
+def load_and_merge_reviews(review_files: List[pathlib.Path]) -> Optional[PRReview]:
+ reviews = []
+ for file in review_files:
+ review = load_review(file)
+ if review is not None and len(review.get("comments", [])) > 0:
+ reviews.append(review)
+
+ if not reviews:
+ return None
+
+ result = reviews[0]
+
+ comments = set()
+ for review in reviews:
+ comments.update(HashableComment(**c) for c in review["comments"])
+
+ result["comments"] = [c.__dict__ for c in sorted(comments)]
+
+ return result
+
+
+def get_line_ranges(diff, files):
+ """Return the line ranges of added lines in diff, suitable for the
+ line-filter argument of clang-tidy
+
+ """
+
+ lines_by_file = {}
+ for filename in diff:
+ if filename.target_file[2:] not in files:
+ continue
+ added_lines = []
+ for hunk in filename:
+ for line in hunk:
+ if line.is_added:
+ added_lines.append(line.target_line_no)
+
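+        # Group consecutive line numbers into ranges: within a consecutive run, index - value stays constant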
+ for _, group in itertools.groupby(
+ enumerate(added_lines), lambda ix: ix[0] - ix[1]
+ ):
+ groups = list(map(itemgetter(1), group))
+ lines_by_file.setdefault(filename.target_file[2:], []).append(
+ [groups[0], groups[-1]]
+ )
+
+ line_filter_json = []
+ for name, lines in lines_by_file.items():
+ line_filter_json.append({"name": name, "lines": lines})
+ # On windows, unidiff has forward slashes but cl.exe expects backslashes.
+ # However, clang.exe on windows expects forward slashes.
+ # Adding a copy of the line filters with backslashes allows for both cl.exe and clang.exe to work.
+ if os.path.sep == "\\":
+ # Converts name to backslashes for the cl.exe line filter.
+        name = os.path.join(*name.split("/"))
+ line_filter_json.append({"name": name, "lines": lines})
+ return json.dumps(line_filter_json, separators=(",", ":"))
+
+
+def cull_comments(pull_request: PullRequest, review, max_comments):
+ """Remove comments from review that have already been posted, and keep
+ only the first max_comments
+
+ """
+
+ unposted_comments = {HashableComment(**c) for c in review["comments"]}
+ posted_comments = {HashableComment(**c) for c in pull_request.get_pr_comments()}
+
+ review["comments"] = [
+ c.__dict__ for c in sorted(unposted_comments - posted_comments)
+ ]
+
+ if len(review["comments"]) > max_comments:
+ review["body"] += (
+ "\n\nThere were too many comments to post at once. "
+ f"Showing the first {max_comments} out of {len(review['comments'])}. "
+ "Check the log or trigger a new build to see more."
+ )
+ review["comments"] = review["comments"][:max_comments]
+
+ return review
+
+
+def strip_enclosing_quotes(string: str) -> str:
+ """Strip leading/trailing whitespace and remove any enclosing quotes"""
+ stripped = string.strip()
+
+ # Need to check double quotes again in case they're nested inside
+ # single quotes
+ for quote in ['"', "'", '"']:
+ if stripped.startswith(quote) and stripped.endswith(quote):
+ stripped = stripped[1:-1]
+ return stripped
+
+
+def set_output(key: str, val: str) -> bool:
+ if "GITHUB_OUTPUT" not in os.environ:
+ return False
+
+ # append key-val pair to file
+ with Path(os.environ["GITHUB_OUTPUT"]).open("a") as f:
+ f.write(f"{key}={val}\n")
+
+ return True
+
+
+def set_summary(val: str) -> bool:
+ if "GITHUB_STEP_SUMMARY" not in os.environ:
+ return False
+
+    # append summary text to the file
+ with Path(os.environ["GITHUB_STEP_SUMMARY"]).open("a") as f:
+ f.write(val)
+
+ return True
+
+
+def decorate_check_names(comment: str) -> str:
+ """
+    Split the check name in [] at the end of the line on its first dash into two groups.
+    Exception: names whose first group starts with 'clang', such as 'clang-diagnostic-error', are left unlinked.
+    Exception to the exception: names starting with 'clang-analyzer' keep that whole prefix as the first group.
+ """
+ version = "extra"
+ url = f"https://clang.llvm.org/{version}/clang-tidy/checks"
+ regex = r"(\[((?:clang-analyzer)|(?:(?!clang)[\w]+))-([\.\w-]+)\]$)"
+ subst = f"[\\g<1>({url}/\\g<2>/\\g<3>.html)]"
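+    # e.g. "[readability-braces-around-statements]" gets turned into a Markdown link to the clang-tidy docs for that check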
+ return re.sub(regex, subst, comment, count=1, flags=re.MULTILINE)
+
+
+def decorate_comment(comment: ReviewComment) -> ReviewComment:
+ comment["body"] = decorate_check_names(comment["body"])
+ return comment
+
+
+def decorate_comments(review: PRReview) -> PRReview:
+ review["comments"] = list(map(decorate_comment, review["comments"]))
+ return review
+
+
+def post_review(
+ pull_request: PullRequest,
+ review: Optional[PRReview],
+ max_comments: int,
+ lgtm_comment_body: str,
+ dry_run: bool,
+) -> int:
+ print(
+ "Created the following review:\n", pprint.pformat(review, width=130), flush=True
+ )
+
+ if not review or review["comments"] == []:
+ print("No warnings to report, LGTM!")
+ if not dry_run:
+ pull_request.post_lgtm_comment(lgtm_comment_body)
+ return 0
+
+ total_comments = len(review["comments"])
+
+ set_output("total_comments", str(total_comments))
+
+ decorated_review = decorate_comments(review)
+
+ print("Removing already posted or extra comments", flush=True)
+ trimmed_review = cull_comments(pull_request, decorated_review, max_comments)
+
+ if not trimmed_review["comments"]:
+ print("Everything already posted!")
+ return total_comments
+
+ if dry_run:
+ pprint.pprint(review, width=130)
+ return total_comments
+
+ print("Posting the review:\n", pprint.pformat(trimmed_review), flush=True)
+ pull_request.post_review(trimmed_review)
+
+ return total_comments
+
+
+def convert_comment_to_annotations(comment):
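+    """Translate a review comment into the annotation format expected by the Checks API"""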
+ return {
+ "path": comment["path"],
+ "start_line": comment.get("start_line", comment["line"]),
+ "end_line": comment["line"],
+ "annotation_level": "warning",
+ "title": "clang-tidy",
+ "message": comment["body"],
+ }
+
+
+def post_annotations(
+ pull_request: PullRequest, review: Optional[PRReview]
+) -> Optional[int]:
+ """Post the first 10 comments in the review as annotations"""
+
+ body: dict[str, Any] = {
+ "name": "clang-tidy-review",
+ "head_sha": pull_request.pull_request.head.sha,
+ "status": "completed",
+ "conclusion": "success",
+ }
+
+ if review is None:
+ return None
+
+    if review["comments"] == []:
+        print("No warnings to report, LGTM!")
+        pull_request.post_annotations(body)
+        return 0
+
+ comments = []
+ for comment in review["comments"]:
+ first_line = comment["body"].splitlines()[0]
+ comments.append(
+ f"{comment['path']}:{comment.get('start_line', comment.get('line', 0))}: {first_line}"
+ )
+
+ total_comments = len(review["comments"])
+
+ body["conclusion"] = "neutral"
+ body["output"] = {
+ "title": "clang-tidy-review",
+ "summary": f"There were {total_comments} warnings",
+ "text": "\n".join(comments),
+ "annotations": [
+ convert_comment_to_annotations(comment)
+ for comment in review["comments"][:MAX_ANNOTATIONS]
+ ],
+ }
+
+ pull_request.post_annotations(body)
+ return total_comments
+
+
+def bool_argument(user_input) -> bool:
+ """Convert text to bool"""
+ user_input = str(user_input).upper()
+ if user_input == "TRUE":
+ return True
+ if user_input == "FALSE":
+ return False
+ raise ValueError("Invalid value passed to bool_argument")
diff --git a/.github/actions/clang-tidy-review/post/clang_tidy_review/clang_tidy_review/post.py b/.github/actions/clang-tidy-review/post/clang_tidy_review/clang_tidy_review/post.py
new file mode 100644
index 0000000..67ff296
--- /dev/null
+++ b/.github/actions/clang-tidy-review/post/clang_tidy_review/clang_tidy_review/post.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python3
+
+# clang-tidy review - post comments
+# Copyright (c) 2022 Peter Hill
+# SPDX-License-Identifier: MIT
+# See LICENSE for more information
+
+import argparse
+import pathlib
+import pprint
+import sys
+
+from clang_tidy_review import (
+ REVIEW_FILE,
+ PullRequest,
+ add_auth_arguments,
+ bool_argument,
+ download_artifacts,
+ get_auth_from_arguments,
+ load_and_merge_reviews,
+ load_metadata,
+ post_annotations,
+ post_review,
+ strip_enclosing_quotes,
+)
+
+
+def main() -> int:
+ parser = argparse.ArgumentParser(
+ description="Post a review based on feedback generated by the clang-tidy-review action"
+ )
+
+ parser.add_argument("--repo", help="Repo name in form 'owner/repo'")
+ parser.add_argument(
+ "--max-comments",
+ help="Maximum number of comments to post at once",
+ type=int,
+ default=25,
+ )
+ parser.add_argument(
+ "--lgtm-comment-body",
+ help="Message to post on PR if no issues are found. An empty string will post no LGTM comment.",
+ type=str,
+ default='clang-tidy review says "All clean, LGTM! :+1:"',
+ )
+ parser.add_argument(
+ "--dry-run", help="Run and generate review, but don't post", action="store_true"
+ )
+ parser.add_argument(
+ "--workflow_id",
+ help="ID of the workflow that generated the review",
+ default=None,
+ )
+ parser.add_argument(
+ "--annotations",
+ help="Use annotations instead of comments",
+ type=bool_argument,
+ default=False,
+ )
+ parser.add_argument(
+ "--num-comments-as-exitcode",
+        help="Set the exit code to be the number of comments",
+ type=bool_argument,
+ default=True,
+ )
+ add_auth_arguments(parser)
+ parser.add_argument(
+ "reviews",
+ metavar="REVIEW_FILES",
+ type=pathlib.Path,
+ nargs="*",
+ default=[pathlib.Path(REVIEW_FILE)],
+ help="Split workflow review results",
+ )
+
+ args = parser.parse_args()
+
+ pull_request = PullRequest(args.repo, None, get_auth_from_arguments(args))
+
+ # Try to read the review artifacts if they're already present
+ metadata = load_metadata()
+ review = load_and_merge_reviews(args.reviews)
+
+ # If not, try to download them automatically
+ if metadata is None and args.workflow_id is not None:
+ print("Attempting to automatically download review artifacts", flush=True)
+ metadata, review = download_artifacts(pull_request, int(args.workflow_id))
+
+ if metadata is None:
+ raise RuntimeError("Couldn't find review metadata")
+
+ pull_request.pr_number = metadata["pr_number"]
+
+ print(
+ "clang-tidy-review generated the following review",
+ pprint.pformat(review, width=130),
+ flush=True,
+ )
+
+ if args.annotations:
+ exit_code = post_annotations(pull_request, review)
+ else:
+ lgtm_comment_body = strip_enclosing_quotes(args.lgtm_comment_body)
+ exit_code = post_review(
+ pull_request, review, args.max_comments, lgtm_comment_body, args.dry_run
+ )
+
+ return (exit_code or 0) if args.num_comments_as_exitcode else 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
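The return expression in `main` does two jobs: `exit_code or 0` maps both `None` (what `post_annotations` returns when there is no review) and `0` to a clean exit, while a positive comment count becomes the process exit status whenever `--num-comments-as-exitcode` is enabled. A small sketch of the mapping (the helper is hypothetical, mirroring the expression above):

```python
def final_exit_code(exit_code, num_comments_as_exitcode: bool) -> int:
    # Mirrors `(exit_code or 0) if args.num_comments_as_exitcode else 0`
    return (exit_code or 0) if num_comments_as_exitcode else 0

assert final_exit_code(None, True) == 0  # no review to post
assert final_exit_code(0, True) == 0     # review posted, no warnings
assert final_exit_code(7, True) == 7     # 7 warnings fail the step with code 7
assert final_exit_code(7, False) == 0    # always succeed when disabled
```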
diff --git a/.github/actions/clang-tidy-review/post/clang_tidy_review/clang_tidy_review/review.py b/.github/actions/clang-tidy-review/post/clang_tidy_review/clang_tidy_review/review.py
new file mode 100644
index 0000000..38132e7
--- /dev/null
+++ b/.github/actions/clang-tidy-review/post/clang_tidy_review/clang_tidy_review/review.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python3
+
+# clang-tidy review
+# Copyright (c) 2020 Peter Hill
+# SPDX-License-Identifier: MIT
+# See LICENSE for more information
+
+import argparse
+import re
+import subprocess
+from pathlib import Path
+
+from clang_tidy_review import (
+ PullRequest,
+ add_auth_arguments,
+ bool_argument,
+ create_review,
+ fix_absolute_paths,
+ get_auth_from_arguments,
+ message_group,
+ post_annotations,
+ post_review,
+ save_metadata,
+ set_output,
+ strip_enclosing_quotes,
+)
+
+BAD_CHARS_APT_PACKAGES_PATTERN = "[;&|($]"
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Create a review from clang-tidy warnings"
+ )
+ parser.add_argument("--repo", help="Repo name in form 'owner/repo'")
+ parser.add_argument("--pr", help="PR number", type=int)
+ parser.add_argument(
+ "--clang_tidy_binary",
+ help="clang-tidy binary",
+ default="clang-tidy-14",
+ type=Path,
+ )
+ parser.add_argument(
+ "--build_dir", help="Directory with compile_commands.json", default="."
+ )
+ parser.add_argument(
+ "--base_dir",
+        help="Absolute path of the initial working directory, if compile_commands.json was generated outside of the Action",
+ default=".",
+ )
+ parser.add_argument(
+ "--clang_tidy_checks",
+        help="Comma-separated list of checks passed to clang-tidy's -checks argument",
+ default="'-*,performance-*,readability-*,bugprone-*,clang-analyzer-*,cppcoreguidelines-*,mpi-*,misc-*'",
+ )
+ parser.add_argument(
+ "--config_file",
+ help="Path to .clang-tidy config file. If not empty, takes precedence over --clang_tidy_checks",
+ default="",
+ )
+ parser.add_argument(
+ "--include",
+ help="Comma-separated list of files or patterns to include",
+ type=str,
+ nargs="?",
+ default="*.[ch],*.[ch]xx,*.[ch]pp,*.[ch]++,*.cc,*.hh",
+ )
+ parser.add_argument(
+ "--exclude",
+ help="Comma-separated list of files or patterns to exclude",
+ nargs="?",
+ default="",
+ )
+ parser.add_argument(
+ "--apt-packages",
+ help="Comma-separated list of apt packages to install",
+ type=str,
+ default="",
+ )
+ parser.add_argument(
+ "--cmake-command",
+ help="If set, run CMake as part of the action with this command",
+ type=str,
+ default="",
+ )
+ parser.add_argument(
+ "--max-comments",
+ help="Maximum number of comments to post at once",
+ type=int,
+ default=25,
+ )
+ parser.add_argument(
+ "--lgtm-comment-body",
+ help="Message to post on PR if no issues are found. An empty string will post no LGTM comment.",
+ type=str,
+ default='clang-tidy review says "All clean, LGTM! :+1:"',
+ )
+ parser.add_argument(
+ "--split_workflow",
+ help=(
+ "Only generate but don't post the review, leaving it for the second workflow. "
+ "Relevant when receiving PRs from forks that don't have the required permissions to post reviews."
+ ),
+ type=bool_argument,
+ default=False,
+ )
+ parser.add_argument(
+ "--annotations",
+ help="Use annotations instead of comments",
+ type=bool_argument,
+ default=False,
+ )
+ parser.add_argument(
+ "-j",
+ "--parallel",
+ help="Number of tidy instances to be run in parallel.",
+ type=int,
+ default=0,
+ )
+ parser.add_argument(
+ "--dry-run", help="Run and generate review, but don't post", action="store_true"
+ )
+ add_auth_arguments(parser)
+
+ args = parser.parse_args()
+
+ # Remove any enclosing quotes and extra whitespace
+ exclude = strip_enclosing_quotes(args.exclude).split(",")
+ include = strip_enclosing_quotes(args.include).split(",")
+
+ if args.apt_packages:
+ # Try to make sure only 'apt install' is run
+ apt_packages = re.split(BAD_CHARS_APT_PACKAGES_PATTERN, args.apt_packages)[
+ 0
+ ].split(",")
+ apt_packages = [pkg.strip() for pkg in apt_packages]
+ with message_group(f"Installing additional packages: {apt_packages}"):
+ subprocess.run(["apt-get", "update"], check=True)
+ subprocess.run(
+ ["apt-get", "install", "-y", "--no-install-recommends", *apt_packages],
+ check=True,
+ )
+
+ build_compile_commands = f"{args.build_dir}/compile_commands.json"
+
+ cmake_command = strip_enclosing_quotes(args.cmake_command)
+
+ # If we run CMake as part of the action, then we know the paths in
+ # the compile_commands.json file are going to be correct
+ if cmake_command:
+ with message_group(f"Running cmake: {cmake_command}"):
+ subprocess.run(cmake_command, shell=True, check=True)
+
+ elif Path(build_compile_commands).exists():
+ fix_absolute_paths(build_compile_commands, args.base_dir)
+
+ pull_request = PullRequest(args.repo, args.pr, get_auth_from_arguments(args))
+
+ review = create_review(
+ pull_request,
+ args.build_dir,
+ args.clang_tidy_checks,
+ args.clang_tidy_binary,
+ args.config_file,
+ args.parallel,
+ include,
+ exclude,
+ )
+
+ with message_group("Saving metadata"):
+ save_metadata(args.pr)
+
+ if args.split_workflow:
+ total_comments = 0 if review is None else len(review["comments"])
+ set_output("total_comments", str(total_comments))
+ print("split_workflow is enabled, not posting review")
+ return
+
+ if args.annotations:
+ post_annotations(pull_request, review)
+ else:
+ lgtm_comment_body = strip_enclosing_quotes(args.lgtm_comment_body)
+ post_review(
+ pull_request, review, args.max_comments, lgtm_comment_body, args.dry_run
+ )
+
+
+if __name__ == "__main__":
+ main()
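The `BAD_CHARS_APT_PACKAGES_PATTERN` guard above is worth spelling out: everything from the first shell metacharacter (`;`, `&`, `|`, `(`, `$`) onward is discarded before the remainder is split into package names, so a command smuggled into `--apt-packages` never reaches `apt-get`. A quick demonstration (the package names are made up):

```python
import re

BAD_CHARS_APT_PACKAGES_PATTERN = "[;&|($]"

user_input = "libfoo-dev,libbar-dev; rm -rf /"
# Keep only the text before the first suspicious character...
safe_part = re.split(BAD_CHARS_APT_PACKAGES_PATTERN, user_input)[0]
# ...then split it into individual package names.
packages = [pkg.strip() for pkg in safe_part.split(",")]
print(packages)  # ['libfoo-dev', 'libbar-dev']
```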
diff --git a/.github/actions/clang-tidy-review/post/clang_tidy_review/pyproject.toml b/.github/actions/clang-tidy-review/post/clang_tidy_review/pyproject.toml
new file mode 100644
index 0000000..2d73b4e
--- /dev/null
+++ b/.github/actions/clang-tidy-review/post/clang_tidy_review/pyproject.toml
@@ -0,0 +1,66 @@
+[build-system]
+requires = [
+ "setuptools >= 65",
+ "setuptools_scm[toml] >= 6.2",
+ "wheel >= 0.29.0",
+]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "clang-tidy-review"
+description = "Run clang-tidy as a GitHub action and automatically post warnings as comments"
+readme = "README.md"
+authors = [{name = "Peter Hill", email = "peter.hill@york.ac.uk"}]
+license = {text = "MIT"}
+dependencies = [
+ "PyGithub ~= 2.1",
+ "unidiff ~= 0.6.0",
+ "pyyaml ~= 6.0.1",
+ "urllib3 ~= 2.2.1",
+]
+keywords = ["C++", "static-analysis"]
+dynamic = ["version"]
+
+[project.urls]
+source = "https://github.com/ZedThree/clang-tidy-review"
+tracker = "https://github.com/ZedThree/clang-tidy-review/issues"
+
+[project.scripts]
+review = "clang_tidy_review.review:main"
+post = "clang_tidy_review.post:main"
+
+[project.optional-dependencies]
+tests = [
+ "pytest >= 3.3.0",
+]
+lint = [
+ "black",
+ "ruff",
+]
+
+[tool.setuptools]
+packages = ["clang_tidy_review"]
+
+[tool.setuptools_scm]
+root = "../.."
+fallback_version = "0.0.0-dev"
+
+[tool.black]
+extend_exclude = "_version.py"
+
+[tool.ruff.lint]
+extend-select = [
+ "B", # flake8-bugbear
+ "I", # isort
+ "C4", # flake8-comprehensions
+ "ICN", # flake8-import-conventions
+ "PT", # flake8-pytest-style
+ "PTH", # flake8-use-pathlib
+ "RET", # flake8-return
+ "RUF", # Ruff-specific
+ "SIM", # flake8-simplify
+ "UP", # pyupgrade
+ "YTT", # flake8-2020
+ "EXE", # flake8-executable
+ "FURB", # refurb
+]
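For reference, the two `[project.scripts]` entries install `review` and `post` executables that resolve to the `main` functions in the modules above. An illustrative sketch of what the generated `post` wrapper amounts to (not the literal setuptools output):

```python
import sys

from clang_tidy_review.post import main

# The console-script wrapper propagates main()'s return value as the
# process exit status, so the comment count can fail a CI step.
if __name__ == "__main__":
    sys.exit(main())
```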
diff --git a/.github/actions/clang-tidy-review/upload/action.yml b/.github/actions/clang-tidy-review/upload/action.yml
new file mode 100644
index 0000000..a4395d2
--- /dev/null
+++ b/.github/actions/clang-tidy-review/upload/action.yml
@@ -0,0 +1,16 @@
+name: 'clang-tidy review - upload artefacts'
+author: 'Peter Hill'
+description: 'Upload artefacts created from a clang-tidy-review run'
+branding:
+ icon: 'book-open'
+ color: 'red'
+runs:
+ using: 'composite'
+ steps:
+ - uses: actions/upload-artifact@v4
+ with:
+ name: clang-tidy-review
+ path: |
+ clang-tidy-review-output.json
+ clang-tidy-review-metadata.json
+ clang_fixes.json
diff --git a/.github/workflows/msbuild.yml b/.github/workflows/msbuild.yml
index 78a9d28..1e8ee4a 100644
--- a/.github/workflows/msbuild.yml
+++ b/.github/workflows/msbuild.yml
@@ -8,8 +8,8 @@ name: 🧩 MSBuild
on:
push:
branches: [ "main" ]
- pull_request:
- branches: [ "main" ]
+ #pull_request:
+ # branches: [ "main" ]
env:
# Path to the solution file relative to the root of the project.
diff --git a/.github/workflows/super-linter.yml b/.github/workflows/super-linter.yml
index 50c648a..ca83ebb 100644
--- a/.github/workflows/super-linter.yml
+++ b/.github/workflows/super-linter.yml
@@ -9,8 +9,8 @@ name: Lint Code Base
on:
push:
branches: [ "main" ]
- pull_request:
- branches: [ "main" ]
+ #pull_request:
+ # branches: [ "main" ]
jobs:
run-lint:
runs-on: ubuntu-latest
diff --git a/.github/workflows/tidy-clang.yml b/.github/workflows/tidy-clang.yml
index 330d5b7..e6796af 100644
--- a/.github/workflows/tidy-clang.yml
+++ b/.github/workflows/tidy-clang.yml
@@ -9,9 +9,13 @@ name: Clang Tidy
on:
push:
branches: [ "main" ]
- paths: ['**.c', '**.cpp', '**.h', '**.hpp', '**.cxx', '**.hxx', '**.cc', '**.hh', '**CMakeLists.txt', 'meson.build', '**.cmake']
+ paths: ['**.c', '**.cpp', '**.h', '**.hpp', '**.cxx', '**.hxx', '**.cc', '**.hh', '**CMakeLists.txt', '**.cmake']
pull_request:
branches: [ "main" ]
+ paths: ['**.c', '**.cpp', '**.h', '**.hpp', '**.cxx', '**.hxx', '**.cc', '**.hh', '**CMakeLists.txt', '**.cmake']
+permissions:
+ contents: read
+ pull-requests: write
jobs:
run-lint:
runs-on: ubuntu-latest
@@ -22,6 +26,16 @@ jobs:
# Full git history is needed to get a proper list of changed files within `super-linter`
fetch-depth: 0
+ #- uses: ./.github/actions/clang-tidy-review
+ # id: review
+
+ # Uploads an artefact containing clang_fixes.json
+ #- uses: ./.github/actions/clang-tidy-review/upload
+ # id: upload-review
+
+ # If there are any comments, fail the check
+ #- if: steps.review.outputs.total_comments > 0
+ # run: exit 1
- uses: cpp-linter/cpp-linter-action@v2
id: linter
env:
@@ -29,7 +43,12 @@ jobs:
with:
style: '' # Use .clang-format config file
tidy-checks: '' # Use .clang-tidy config file
- extra-args: '-std=c++17'
+ extra-args: '-std=c++17 -I./'
verbosity: debug
+ tidy-review: true
+ passive-reviews: true
# only 'update' a single comment in a pull request thread.
#thread-comments: ${{ github.event_name == 'pull_request' && 'update' }}
+ - name: Fail fast?!
+ if: steps.linter.outputs.checks-failed > 0
+ run: exit 1
diff --git a/MeshPrimitive.h b/MeshPrimitive.h
new file mode 100644
index 0000000..1a180e6
--- /dev/null
+++ b/MeshPrimitive.h
@@ -0,0 +1,123 @@
+/*********************************************************************
+ * \file MeshPrimitive.h
+ * \brief Mesh primitive, aggregate, use shared_ptr
+ *
+ * \author Lancelot 'Robin' Chen
+ * \date November 2022
+ *********************************************************************/
+#ifndef _MESH_PRIMITIVE_H
+#define _MESH_PRIMITIVE_H
+
+#include "Primitives/Primitive.h"
+#include "Geometries/GeometryData.h"
+#include "GameEngine/RenderBuffer.h"
+#include "GameEngine/EffectTextureMap.h"
+#include "Renderer/Renderer.h"
+#include "Renderer/RenderElement.h"
+#include "PrimitiveMaterial.h"
+#include "GameEngine/EffectSemanticTexture.h"
+#include <system_error>
+#include <memory>
+#include <vector>
+#include <optional>
+
+namespace Enigma::Renderables
+{
+    using error = std::error_code;
+
+    class PrimitiveMaterial;
+
+    class MeshPrimitive : public Primitives::Primitive
+    {
+        DECLARE_EN_RTTI;
+    public:
+        using EffectMaterialList = std::vector<std::shared_ptr<Engine::EffectMaterial>>;
+        using TextureMapList = std::vector<Engine::EffectTextureMap>;
+    public:
+        MeshPrimitive(const Primitives::PrimitiveId& id);
+        MeshPrimitive(const MeshPrimitive&) = delete; // non-copyable
+        MeshPrimitive(MeshPrimitive&&) = delete;
+        ~MeshPrimitive() override;
+        MeshPrimitive& operator=(const MeshPrimitive&) = delete;
+        MeshPrimitive& operator=(MeshPrimitive&&) = delete;
+
+        static std::shared_ptr<MeshPrimitive> create(const Primitives::PrimitiveId& id);
+
+        virtual std::shared_ptr<Primitives::PrimitiveAssembler> assembler() const override;
+        virtual void assemble(const std::shared_ptr<Primitives::PrimitiveAssembler>& assembler) const override;
+        virtual std::shared_ptr<Primitives::PrimitiveDisassembler> disassembler() const override;
+        virtual void disassemble(const std::shared_ptr<Primitives::PrimitiveDisassembler>& disassembler) override; ///< that's double dispatch
+
+        virtual std::shared_ptr<MathLib::Intersector> intersector(const MathLib::Ray3& ray) override;
+
+        /** get geometry data */
+        const Geometries::GeometryDataPtr& getGeometryData() const { return m_geometry; }
+
+        std::shared_ptr<PrimitiveMaterial> getMaterial(unsigned index);
+        const std::vector<std::shared_ptr<PrimitiveMaterial>>& materials() const { return m_materials; }
+        unsigned getMaterialCount() const;
+        virtual void changeMaterials(const std::vector<std::shared_ptr<PrimitiveMaterial>>& materials);
+        virtual void rebindMaterials();
+
+        void changeSemanticTexture(const Engine::EffectSemanticTexture& semantic_texture);
+        void bindSemanticTexture(const Engine::EffectSemanticTexture& semantic_texture);
+        void bindSemanticTextures(const std::vector<Engine::EffectSemanticTexture>& textures);
+
+        /** update render buffer */
+        error updateRenderBuffer();
+        /** ranged update render buffer */
+        error rangedUpdateRenderBuffer(unsigned vtx_offset, unsigned vtx_count, std::optional<unsigned> idx_offset, std::optional<unsigned> idx_count);
+
+        /** render list id */
+        Renderer::Renderer::RenderListID renderListId() const { return m_renderListID; }
+        void renderListId(Renderer::Renderer::RenderListID id) { m_renderListID = id; }
+
+        /** insert to renderer */
+        virtual error insertToRendererWithTransformUpdating(const std::shared_ptr<Engine::IRenderer>& renderer,
+            const MathLib::Matrix4& mxWorld, const Engine::RenderLightingState& lightingState) override;
+        /** remove from renderer */
+        virtual error removeFromRenderer(const std::shared_ptr<Engine::IRenderer>& renderer) override;
+
+        /** calculate bounding volume */
+        virtual void calculateBoundingVolume(bool axis_align) override;
+
+        /** update world transform */
+        virtual void updateWorldTransform(const MathLib::Matrix4& mxWorld) override;
+
+        virtual void selectVisualTechnique(const std::string& techniqueName) override;
+
+        /** @name building mesh primitive */
+        //@{
+        /** link geometry object and render buffer */
+        void linkGeometryData(const Geometries::GeometryDataPtr& geo, const Engine::RenderBufferPtr& render_buffer);
+        /** change segment's effect */
+        virtual void changeEffectInSegment(unsigned index, const std::shared_ptr<Engine::EffectMaterial>& effect);
+        /** change primitive's effect */
+        virtual void changeEffects(const EffectMaterialList& effects);
+        /** change segment's texture map */
+        void changeTextureMapInSegment(unsigned index, const Engine::EffectTextureMap& tex_map);
+        /** change primitive's texture map */
+        void changeTextureMaps(const TextureMapList& tex_maps);
+        /** create render elements */
+        void createRenderElements();
+        //@}
+
+    protected:
+        void cleanupGeometry();
+
+        void bindPrimitiveMaterials();
+        void bindSegmentMaterial(unsigned index);
+        void loosePrimitiveMaterials();
+        void looseSegmentMaterial(unsigned index);
+
+    protected:
+        using RenderElementList = std::vector<std::shared_ptr<Renderer::RenderElement>>;
+        Geometries::GeometryDataPtr m_geometry;
+        Engine::RenderBufferPtr m_renderBuffer;
+        RenderElementList m_elements;
+        std::vector<std::shared_ptr<PrimitiveMaterial>> m_materials;
+        Renderer::Renderer::RenderListID m_renderListID; ///< default : render group scene
+    };
+}
+
+#endif // _MESH_PRIMITIVE_H
diff --git a/StdAny/StdAny/MeshPrimitive.cpp b/StdAny/StdAny/MeshPrimitive.cpp
new file mode 100644
index 0000000..cae0254
--- /dev/null
+++ b/StdAny/StdAny/MeshPrimitive.cpp
@@ -0,0 +1,345 @@
+#include "MeshPrimitive.h"
+#include "MeshPrimitiveAssembler.h"
+#include "GameEngine/EffectMaterial.h"
+#include "GameEngine/EffectMaterialSource.h"
+#include "GraphicKernel/IShaderVariable.h"
+#include "GameEngine/RenderBuffer.h"
+#include "Geometries/GeometryData.h"
+#include "GameEngine/IRenderer.h"
+#include "Renderer/RenderElement.h"
+#include "Platforms/PlatformLayer.h"
+#include "Geometries/GeometryDataQueries.h"
+#include "RenderableErrors.h"
+#include "Geometries/GeometryAssembler.h"
+#include "IntersectorMeshRay3.h"
+#include "RenderableIntersectorCommands.h"
+#include <cassert>
+
+using namespace Enigma::Renderables;
+using namespace Enigma::Engine;
+using namespace Enigma::Graphics;
+
+DEFINE_RTTI(Renderables, MeshPrimitive, Primitive);
+
+MeshPrimitive::MeshPrimitive(const Primitives::PrimitiveId& id) : Primitive(id)
+{
+ m_lazyStatus.changeStatus(Frameworks::LazyStatus::Status::Ready);
+ m_factoryKey = FactoryKey(TYPE_RTTI);
+ m_geometry = nullptr;
+ m_renderBuffer = nullptr;
+ m_renderListID = Renderer::Renderer::RenderListID::Scene;
+ m_elements.clear();
+ m_materials.clear();
+}
+
+MeshPrimitive::~MeshPrimitive()
+{
+ cleanupGeometry();
+ loosePrimitiveMaterials();
+ m_materials.clear();
+ std::make_shared(m_id)->execute();
+}
+
+std::shared_ptr<MeshPrimitive> MeshPrimitive::create(const Primitives::PrimitiveId& id)
+{
+    return std::make_shared<MeshPrimitive>(id);
+}
+
+std::shared_ptr<Primitives::PrimitiveAssembler> MeshPrimitive::assembler() const
+{
+    return std::make_shared<MeshPrimitiveAssembler>(m_id.origin(), m_factoryKey);
+}
+
+void MeshPrimitive::assemble(const std::shared_ptr<Primitives::PrimitiveAssembler>& assembler) const
+{
+    assert(assembler);
+    Primitive::assemble(assembler);
+    const auto mesh_assembler = std::dynamic_pointer_cast<MeshPrimitiveAssembler>(assembler);
+    if (!mesh_assembler) return;
+ if (m_geometry)
+ {
+ mesh_assembler->geometryId(m_geometry->id());
+ //if ((m_geometry->factoryDesc().instanceType() == FactoryDesc::InstanceType::Native)
+ // || (m_geometry->factoryDesc().instanceType() == FactoryDesc::InstanceType::ResourceAsset))
+        if (m_geometry->factoryKey().isNative()) // only native geometry needs to be assembled
+ {
+ mesh_assembler->geometry(m_geometry);
+ }
+ }
+ for (auto& mat : m_materials)
+ {
+ mesh_assembler->addMaterial(mat);
+ }
+ mesh_assembler->renderListID(m_renderListID);
+}
+
+std::shared_ptr<Primitives::PrimitiveDisassembler> MeshPrimitive::disassembler() const
+{
+    return std::make_shared<MeshPrimitiveDisassembler>();
+}
+
+void MeshPrimitive::disassemble(const std::shared_ptr<Primitives::PrimitiveDisassembler>& disassembler)
+{
+    assert(disassembler);
+    assert(m_id.origin() == disassembler->id().origin()); // id is already set in the constructor
+    // the disassembler's items are applied one at a time, because some of them depend on earlier ones
+    m_factoryKey = disassembler->factoryKey();
+    const auto mesh_disassembler = std::dynamic_pointer_cast<MeshPrimitiveDisassembler>(disassembler);
+ m_geometry = nullptr;
+ if (std::make_shared(mesh_disassembler->geometryId())->dispatch())
+ {
+ m_geometry = std::make_shared(mesh_disassembler->geometryId())->dispatch();
+ }
+ else if (mesh_disassembler->rawGeometryDto().has_value())
+ {
+ m_geometry = std::make_shared(mesh_disassembler->geometryId(), mesh_disassembler->rawGeometryDto().value())->dispatch();
+ }
+ m_lazyStatus.changeStatus(Frameworks::LazyStatus::Status::Ghost);
+ m_renderBuffer = nullptr;
+ m_elements.clear();
+ m_materials = mesh_disassembler->materials();
+ m_renderListID = mesh_disassembler->renderListID();
+ m_bound = disassembler->boundingVolume();
+ if (disassembler->animatorId().has_value()) animatorId(disassembler->animatorId().value());
+ selectVisualTechnique(disassembler->visualTechniqueSelection());
+ calculateBoundingVolume(true);
+}
+
+std::shared_ptr<MathLib::Intersector> MeshPrimitive::intersector(const MathLib::Ray3& ray)
+{
+    return std::make_shared<IntersectorMeshRay3>(std::dynamic_pointer_cast<MeshPrimitive>(shared_from_this()), ray);
+}
+
+std::shared_ptr<PrimitiveMaterial> MeshPrimitive::getMaterial(unsigned index)
+{
+    if (index >= m_materials.size()) return nullptr;
+    return m_materials[index];
+}
+
+unsigned MeshPrimitive::getMaterialCount() const
+{
+    return static_cast<unsigned>(m_materials.size());
+}
+
+void MeshPrimitive::changeMaterials(const std::vector<std::shared_ptr<PrimitiveMaterial>>& materials)
+{
+ loosePrimitiveMaterials();
+ m_materials = materials;
+ bindPrimitiveMaterials();
+}
+
+void MeshPrimitive::rebindMaterials()
+{
+ loosePrimitiveMaterials();
+ bindPrimitiveMaterials();
+}
+
+void MeshPrimitive::changeSemanticTexture(const Engine::EffectSemanticTexture& semantic_texture)
+{
+ if (m_materials.empty()) return;
+ loosePrimitiveMaterials();
+ for (auto& mat : m_materials)
+ {
+ if (mat == nullptr) continue;
+ mat->changeSemanticTexture(semantic_texture);
+ }
+ bindPrimitiveMaterials();
+}
+
+void MeshPrimitive::bindSemanticTexture(const Engine::EffectSemanticTexture& semantic_texture)
+{
+ if (m_materials.empty()) return;
+ loosePrimitiveMaterials();
+ for (auto& mat : m_materials)
+ {
+ if (mat == nullptr) continue;
+ mat->bindSemanticTexture(semantic_texture);
+ }
+ bindPrimitiveMaterials();
+}
+
+void MeshPrimitive::bindSemanticTextures(const std::vector<Engine::EffectSemanticTexture>& textures)
+{
+ if (m_materials.empty()) return;
+ loosePrimitiveMaterials();
+ for (auto& mat : m_materials)
+ {
+ if (mat == nullptr) continue;
+ mat->bindSemanticTextures(textures);
+ }
+ bindPrimitiveMaterials();
+}
+
+error MeshPrimitive::updateRenderBuffer()
+{
+ assert(m_geometry);
+ if (!m_renderBuffer) return ErrorCode::nullRenderBuffer;
+ const error er = m_renderBuffer->updateVertex(m_geometry->getVertexMemory(), m_geometry->getIndexMemory());
+ return er;
+}
+
+error MeshPrimitive::rangedUpdateRenderBuffer(unsigned vtx_offset, unsigned vtx_count,
+    std::optional<unsigned> idx_offset, std::optional<unsigned> idx_count)
+{
+ assert(m_geometry);
+ if (!m_renderBuffer) return ErrorCode::nullRenderBuffer;
+ std::optional idx_memory;
+ if (idx_count && idx_offset) idx_memory = m_geometry->getRangedIndexMemory(idx_offset.value(), idx_count.value());
+ const error er = m_renderBuffer->rangedUpdateVertex(m_geometry->getRangedVertexMemory(vtx_offset, vtx_count), idx_memory);
+ return er;
+}
+
+error MeshPrimitive::insertToRendererWithTransformUpdating(const std::shared_ptr<Engine::IRenderer>& renderer,
+ const MathLib::Matrix4& mxWorld, const Engine::RenderLightingState& lightingState)
+{
+ if (!m_lazyStatus.isReady()) return ErrorCode::ok;
+    const auto render = std::dynamic_pointer_cast<Renderer::Renderer>(renderer);
+ if (FATAL_LOG_EXPR(!render)) return ErrorCode::nullRenderer;
+ m_mxPrimitiveWorld = mxWorld;
+ if (testPrimitiveFlag(Primitive_UnRenderable)) return ErrorCode::ok;
+
+ if (FATAL_LOG_EXPR(m_elements.empty())) return ErrorCode::emptyRenderElementList;
+
+ error er = ErrorCode::ok;
+ for (auto& ele : m_elements)
+ {
+ er = render->insertRenderElement(ele, mxWorld, lightingState, m_renderListID);
+ if (er) return er;
+ }
+ return er;
+}
+
+error MeshPrimitive::removeFromRenderer(const std::shared_ptr<Engine::IRenderer>& renderer)
+{
+    const auto render = std::dynamic_pointer_cast<Renderer::Renderer>(renderer);
+ if (FATAL_LOG_EXPR(!render)) return ErrorCode::nullRenderer;
+ if (FATAL_LOG_EXPR(m_elements.empty())) return ErrorCode::emptyRenderElementList;
+ for (auto& ele : m_elements)
+ {
+ render->removeRenderElement(ele, m_renderListID);
+ }
+ return ErrorCode::ok;
+}
+
+void MeshPrimitive::calculateBoundingVolume(bool axis_align)
+{
+ if (m_geometry)
+ {
+ m_geometry->calculateBoundingVolume(axis_align);
+ m_bound = m_geometry->getBoundingVolume();
+ }
+}
+
+void MeshPrimitive::updateWorldTransform(const MathLib::Matrix4& mxWorld)
+{
+ m_mxPrimitiveWorld = mxWorld;
+}
+
+void MeshPrimitive::selectVisualTechnique(const std::string& techniqueName)
+{
+ Primitive::selectVisualTechnique(techniqueName);
+ for (const auto& mat : m_materials)
+ {
+ if (mat) mat->selectVisualTechnique(techniqueName);
+ }
+}
+
+void MeshPrimitive::linkGeometryData(const Geometries::GeometryDataPtr& geo, const Engine::RenderBufferPtr& render_buffer)
+{
+ cleanupGeometry();
+ m_geometry = geo;
+ m_renderBuffer = render_buffer;
+ m_bound = m_geometry->getBoundingVolume();
+}
+
+void MeshPrimitive::changeEffectInSegment(unsigned index, const std::shared_ptr<Engine::EffectMaterial>& effect)
+{
+ if (index >= m_materials.size()) return;
+ looseSegmentMaterial(index);
+ if (m_materials[index]) m_materials[index]->changeEffect(effect);
+ bindSegmentMaterial(index);
+}
+
+void MeshPrimitive::changeEffects(const EffectMaterialList& effects)
+{
+ loosePrimitiveMaterials();
+ for (unsigned i = 0; i < effects.size() && i < m_materials.size(); i++)
+ {
+ if (m_materials[i]) m_materials[i]->changeEffect(effects[i]);
+ }
+ bindPrimitiveMaterials();
+}
+
+void MeshPrimitive::changeTextureMapInSegment(unsigned index, const Engine::EffectTextureMap& tex_map)
+{
+ if (index >= m_materials.size()) return;
+ looseSegmentMaterial(index);
+ if (m_materials[index]) m_materials[index]->changeTextureMap(tex_map);
+ bindSegmentMaterial(index);
+}
+
+void MeshPrimitive::changeTextureMaps(const TextureMapList& tex_maps)
+{
+ loosePrimitiveMaterials();
+ for (unsigned i = 0; i < tex_maps.size() && i < m_materials.size(); i++)
+ {
+ if (m_materials[i]) m_materials[i]->changeTextureMap(tex_maps[i]);
+ }
+ bindPrimitiveMaterials();
+}
+
+void MeshPrimitive::createRenderElements()
+{
+ assert(m_geometry);
+ assert(m_renderBuffer);
+ unsigned elem_count = m_geometry->getSegmentCount();
+    if (elem_count > m_materials.size()) elem_count = static_cast<unsigned>(m_materials.size());
+ assert(elem_count > 0);
+ m_elements.clear();
+ m_elements.reserve(elem_count);
+ for (unsigned i = 0; i < elem_count; i++)
+ {
+        m_elements.emplace_back(std::make_shared<Renderer::RenderElement>(m_renderBuffer, m_materials[i]->effectMaterial(), m_geometry->getSegment(i)));
+ }
+}
+
+void MeshPrimitive::cleanupGeometry()
+{
+ m_elements.clear();
+ m_geometry = nullptr;
+ m_renderBuffer = nullptr;
+}
+
+void MeshPrimitive::bindPrimitiveMaterials()
+{
+ if (m_materials.empty()) return;
+ for (auto& mat : m_materials)
+ {
+ if (mat == nullptr) continue;
+ mat->assignShaderTextures();
+ }
+}
+
+void MeshPrimitive::bindSegmentMaterial(unsigned index)
+{
+ if (index >= m_materials.size()) return;
+ if (m_materials[index] == nullptr) return;
+ m_materials[index]->assignShaderTextures();
+}
+
+void MeshPrimitive::loosePrimitiveMaterials()
+{
+ if (m_materials.empty()) return;
+ for (auto& mat : m_materials)
+ {
+ if (mat == nullptr) continue;
+ mat->unassignShaderTextures();
+ }
+}
+
+void MeshPrimitive::looseSegmentMaterial(unsigned index)
+{
+ if (index >= m_materials.size()) return;
+ if (m_materials[index] == nullptr) return;
+ m_materials[index]->unassignShaderTextures();
+}
diff --git a/StdAny/StdAny/StdAny.cpp b/StdAny/StdAny/StdAny.cpp
index f795655..76ae59b 100644
--- a/StdAny/StdAny/StdAny.cpp
+++ b/StdAny/StdAny/StdAny.cpp
@@ -20,13 +20,16 @@ class obj
m_a = o.m_a;
std::cout << "obj " << m_a << " copied. \n";
}
-
+
~obj()
{
std::cout << " obj " << m_a << " destruct. \n";
}
+ int& a() { return m_a; }
+private:
int m_a;
+ static int m_b;
};
@@ -41,10 +44,10 @@ class bank
{
std::cout << "bank destruct. \n";
}
- void set(const std::any& a)
+ void set(const std::any& aB)
{
std::cout << "set bank data. \n";
- m_data = a;
+ m_data = aB;
}
 //template <typename T> void set_org(T a) /// if the parameter type is written as plain T, the argument is copied in
@@ -54,7 +57,7 @@ class bank
m_data = a; /// 這裡會複製
}
- const std::any& get()
+ const std::any& get()
{
std::cout << "get bank data. \n";
return m_data;
@@ -77,6 +80,7 @@ class bank
return std::ref(m_data);
}
std::any m_data;
+    std::vector<obj> m_org_data;
 //const std::any& m_data; /// cannot be used
};
 using func_any = std::function<void(const std::any&)>;
@@ -103,7 +107,7 @@ void func(const std::any& a)
 //auto ob = std::any_cast<std::vector<obj>>(std::cref(a)); /// cannot cast it this way
 auto ob = std::any_cast<std::reference_wrapper<std::vector<obj>>>(a1); /// casting by reference has to be done like this
//ob[3].m_a = -1; // by value
-    ob.get()[3].m_a = -1; // by reference: it has to be get() back out of the wrapper
+    ob.get()[3].a() = -1; // by reference: it has to be get() back out of the wrapper
std::cout << "func return\n"; /// 參數跟著內部變數一起解構
}
@@ -153,7 +157,7 @@ int main()
 //auto ov2 = b->get_org<std::vector<obj>&>(); /// makes a copy: auto is not a reference
 //auto& ov2 = b->get_org<std::vector<obj>&>(); /// no copy
 auto& ov2 = b->get_org<std::vector<obj>&>(); /// no copy, and the contents can be modified in place
- ov2[8].m_a = -23;
+ ov2[8].a() = -23;
std::cout << "end block\n";
}
 /// conclusion: any is a value; constructing or assigning it makes a copy, but reading the value back out does not make an extra copy
diff --git a/StdAny/StdAny/StdAny.vcxproj b/StdAny/StdAny/StdAny.vcxproj
index 2efad0c..daab58c 100644
--- a/StdAny/StdAny/StdAny.vcxproj
+++ b/StdAny/StdAny/StdAny.vcxproj
@@ -85,6 +85,7 @@
     <LinkIncremental>false</LinkIncremental>
     <OutDir>$(SolutionDir)$(PlatformTarget)\$(Configuration)\</OutDir>
+    <EnableClangTidyCodeAnalysis>true</EnableClangTidyCodeAnalysis>
@@ -138,6 +139,7 @@
       <PreprocessorDefinitions>NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
       <ConformanceMode>true</ConformanceMode>
       <LanguageStandard>stdcpp17</LanguageStandard>
+      <AdditionalIncludeDirectories>../../</AdditionalIncludeDirectories>
     </ClCompile>
     <Link>
       <SubSystem>Console</SubSystem>
@@ -147,6 +149,7 @@
+    <ClCompile Include="MeshPrimitive.cpp" />
diff --git a/StdAny/StdAny/StdAny.vcxproj.filters b/StdAny/StdAny/StdAny.vcxproj.filters
index 256f713..df9fc23 100644
--- a/StdAny/StdAny/StdAny.vcxproj.filters
+++ b/StdAny/StdAny/StdAny.vcxproj.filters
@@ -18,5 +18,8 @@
       <Filter>來源檔案</Filter>
+    <ClCompile Include="MeshPrimitive.cpp">
+      <Filter>來源檔案</Filter>
+    </ClCompile>
\ No newline at end of file