From c02da85d08b44646500c1596d81c4e837c129cb2 Mon Sep 17 00:00:00 2001 From: Sarah GLINER Date: Wed, 23 Aug 2023 16:35:05 +0200 Subject: [PATCH] add create_app_list --- create_app_list/.gitconfig | 2 + create_app_list/app_load_params_check.py | 175 ++++++++++++++++++ .../app_load_params_gen_db_full.py | 138 ++++++++++++++ create_app_list/app_load_params_utils.py | 38 ++++ create_app_list/clone_repos.py | 77 ++++++++ create_app_list/main.py | 69 +++++++ create_app_list/makefile_dump.py | 139 ++++++++++++++ create_app_list/parse_github.py | 56 ++++++ 8 files changed, 694 insertions(+) create mode 100644 create_app_list/.gitconfig create mode 100755 create_app_list/app_load_params_check.py create mode 100755 create_app_list/app_load_params_gen_db_full.py create mode 100755 create_app_list/app_load_params_utils.py create mode 100755 create_app_list/clone_repos.py create mode 100755 create_app_list/main.py create mode 100755 create_app_list/makefile_dump.py create mode 100755 create_app_list/parse_github.py diff --git a/create_app_list/.gitconfig b/create_app_list/.gitconfig new file mode 100644 index 0000000..fd049c6 --- /dev/null +++ b/create_app_list/.gitconfig @@ -0,0 +1,2 @@ +[url "https://github.com/"] + insteadOf = ssh://git@github.com/ diff --git a/create_app_list/app_load_params_check.py b/create_app_list/app_load_params_check.py new file mode 100755 index 0000000..f96f801 --- /dev/null +++ b/create_app_list/app_load_params_check.py @@ -0,0 +1,175 @@ +#!/usr/bin/env python3 + +from argparse import ArgumentParser +from pathlib import Path +from typing import Dict +from collections import defaultdict +import json + +APP_LOAD_PARAMS_ALLOWED = { + "targetId", + "targetVersion", + "apiLevel", + "fileName", + "icon", + "curve", + "path", + "path_slip21", + "appName", + # "signature", # Reserved for internal usage + # "signApp", # Reserved for internal usage + "appFlags", + # "bootAddr", # Deprecated? 
+ # "rootPrivateKey", # Should not be used for app deployment + # "signPrivateKey", # Should not be used for app deployment + # "apdu", # Should not be used for app deployment + # "deployLegacy", # Deprecated? + "delete", + # "params", # Deprecated? + "tlv", + "dataSize", + "appVersion", + # "offline", # Should not be used for app deployment + # "offlineText", # Should not be used for app deployment + # "installparamsSize", # Deprecated? + "tlvraw", + # "dep", # Deprecated? + "nocrc", +} + +APP_LOAD_PARAMS_VALUE_CHECK = { + "curve", + "path", + "path_slip21", + "appName", + "appFlags", +} + + +def parse_listapploadparams(app_load_params_str: str) -> Dict: + # Convert to dict. Store value in list type as some params can appear + # multiple times (e.g `--path`). + + app_load_params = defaultdict(list) + for param in app_load_params_str.split("--"): + param = param.strip() + if not param: + continue + + if param.startswith("targetVersion="): + parts = param.split("=") + else: + parts = param.split(" ") + + param_name = parts[0] + + param_value = None + if len(parts) > 1: + param_value = " ".join(parts[1:]) + + app_load_params[param_name].append(param_value) + + return dict(app_load_params) + + +def check_manifest(manifest: dict, database: dict) -> None: + ret = 0 + + for variant, data in manifest["VARIANTS"].items(): + target = data["TARGET"] + print(f"Checking for target '{target}' and variant '{variant}'") + + app_load_params_str = data["APP_LOAD_PARAMS"] + app_load_params = parse_listapploadparams(app_load_params_str) + print("Retrieved listapploadparams:") + print(json.dumps(app_load_params, indent=4)) + + # Check that no unknown or reserved param is used + for key in app_load_params: + if key not in APP_LOAD_PARAMS_ALLOWED: + print(f"[ERROR] Not allowed '{key}' in APP_LOAD_PARAMS") + ret = -1 + + # Retrieve database app_params + app_params_ref = database.get(variant) + if not app_params_ref: + print(f"[ERROR] Missing '{variant}' definition in the database") + ret 
= -1 + break + + # Check that the params match with the one from the database + for key in APP_LOAD_PARAMS_VALUE_CHECK: + app_params_ref_value = app_params_ref.get(key) + app_load_params_value = app_load_params.get(key) + if key == "appName": + if len(app_load_params_value) != 1: + print(f"[ERROR] Expected a single value for 'appName' ({app_load_params_value} vs {app_params_ref_value})") + ret = -1 + continue + app_load_params_value = app_load_params_value[0] + elif key == "appFlags": + if not app_load_params_value: + app_load_params_value = ["0x000"] + + if len(app_load_params_value) != 1: + print(f"[ERROR] Expected a single value for 'appFlags' ({app_load_params_value} vs {app_params_ref_value})") + ret = -1 + continue + + app_load_params_value = app_load_params_value[0] + if app_load_params_value.startswith("0x"): + app_load_params_value = int(app_load_params_value, 16) + else: + app_load_params_value = int(app_load_params_value) + + app_params_ref_value = app_params_ref_value.get(target) + if not app_params_ref_value: + print(f"[ERROR] Missing 'appFlags' for '{target}'") + ret = -1 + continue + if app_params_ref_value.startswith("0x"): + app_params_ref_value = int(app_params_ref_value, 16) + else: + app_params_ref_value = int(app_params_ref_value) + + if not app_load_params_value == app_params_ref_value: + print(f"[ERROR] Unexpected value for '{key}' ({app_load_params_value} vs {app_params_ref_value})") + ret = -1 + + return ret + + +def check_app(app_manifests_path: Path, database_path: Path) -> None: + ret = 0 + + # Retrieve database + with open(database_path, 'r') as f: + database = json.load(f) + + manifest_list = [x for x in app_manifests_path.iterdir() if x.name.endswith(".json")] + for manifest_path in manifest_list: + # Retrieve manifest + with open(manifest_path, 'r') as f: + manifest = json.load(f) + + print(f"Checking {manifest_path.name}") + ret |= check_manifest(manifest, database) + + if ret: + print("Please fix the issues by either:") + print("- 
Updating your app Makefile") + print("- Creating a PR on https://github.com/LedgerHQ/ledger-app-database" + " to update the app-params-database.json") + + exit(ret) + + +if __name__ == "__main__": + parser = ArgumentParser() + + parser.add_argument("--app_manifests_path", required=True, type=Path) + parser.add_argument("--database_path", required=True, type=Path) + + args = parser.parse_args() + + check_app(args.app_manifests_path, args.database_path) diff --git a/create_app_list/app_load_params_gen_db_full.py b/create_app_list/app_load_params_gen_db_full.py new file mode 100755 index 0000000..8d0cc35 --- /dev/null +++ b/create_app_list/app_load_params_gen_db_full.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python3 + +from argparse import ArgumentParser +from pathlib import Path +from app_load_params_utils import load_database, save_database +from makefile_dump import get_app_listvariants, get_app_listparams +from app_load_params_check import APP_LOAD_PARAMS_VALUE_CHECK, parse_listapploadparams +import json +from collections import namedtuple + +Models = namedtuple('Models', ['sdk_value', 'device_name']) + +MODELS = [Models("$NANOS_SDK", "nanos"), + Models("$NANOX_SDK", "nanox"), + Models("$NANOSP_SDK", "nanosp"), + Models("$STAX_SDK", "stax")] + + +BUILD_PATH_LIST = { + "app-acala" : "app" , + "app-algorand" : "app" , + "app-avalanche" : "app" , + "app-arweave" : "app" , + "app-alephzero" : "app" , + "app-astar" : "app" , + "app-axelar" : "app" , + "app-bifrost" : "app" , + "app-bifrost-kusama" : "app" , + "app-bifrost-new" : "app" , + "app-blockstack" : "app" , + "app-coti" : "app" , + "app-casper" : "app" , + "app-centrifuge" : "app" , + "app-cosmos" : "app" , + "app-cryptocom" : "app" , + "app-dgld" : "app" , + "app-decimal" : "app" , + "app-desmos" : "app" , + "app-dock" : "app" , + "app-edgeware" : "app" , + "app-equilibrium" : "app" , + "app-filecoin" : "app" , + "app-firmachain" : "app" , + "app-flow" : "app" , + "app-genshiro" : "app" , + "app-iov" : "app" , + 
"app-internetcomputer" : "app" , + "app-karura" : "app" , + "app-khala" : "app" , + "app-kusama" : "app" , + "app-medibloc" : "app" , + "app-near" : "workdir/app-near" , + "app-nodle" : "app" , + "app-oasis" : "app" , + "app-panacea" : "app" , + "app-parallel" : "app" , + "app-persistence" : "app" , + "app-phala" : "app" , + "app-polkadex" : "app" , + "app-polkadot" : "app" , + "app-polymesh" : "app" , + "app-reef" : "app" , + "app-secret" : "app" , + "app-stacks" : "app" , + "app-statemine" : "app" , + "app-statemint" : "app" , + "app-thorchain" : "app" , + "app-terra" : "app" , + "app-xxnetwork" : "app" , +} + + +def gen_variant(app_name: str, output_file: Path = "", workdir: Path = "") -> dict: + print(f"Generating for {app_name}") + + app_build_path = BUILD_PATH_LIST.get(app_name, "./") + if app_build_path != "./": + app_full_path = workdir / app_name / app_build_path + else: + app_full_path = workdir / app_name + + # Retrieve database + + database_params = { + "name": app_name, + } + + # Retrieve available variants + for model in MODELS: + try: + variant_param_name, variants = get_app_listvariants(app_full_path, model.sdk_value, allow_failure=True) + except: + print("Skipping generation due to error") + continue + + database_params["variant_param"] = variant_param_name + database_params["variants_" + model.device_name] = variants + if app_build_path != "./": + database_params["build_path"] = app_build_path + + return database_params + + +def gen_all_variants(config: str, output_file: Path = "", workdir: Path = "") -> str: + output = [] + for key in config: + if key["name"]: + output.append(gen_variant(key["name"], output_file, workdir)) + return output + + +def get_variants(input_file: Path = "", input_list: str = "", output_file: Path = "", workdir: Path = "") -> str: + if input_file: + config = load_database(input_file) + else: + config = input_list + + output = gen_all_variants(config, output_file, workdir) + + if output_file: + save_database(output, 
output_file) + + return output + + +if __name__ == "__main__": + parser = ArgumentParser() + + parser.add_argument("--config_file", required=True, type=Path) + parser.add_argument("--database_path", required=True, type=Path) + + args = parser.parse_args() + + if args.config_file is not None: + config = load_database(args.config_file) + gen_all_variants(config, args.database_path) + else: + parser.print_help() diff --git a/create_app_list/app_load_params_utils.py b/create_app_list/app_load_params_utils.py new file mode 100755 index 0000000..0dfb391 --- /dev/null +++ b/create_app_list/app_load_params_utils.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 + +from pathlib import Path +import json + + +def format_database(database: dict) -> str: + database_str = json.dumps(database, indent=2, sort_keys=True) + # Drop some newlines to compact a bit the data while still + # making it readable. + database_str = database_str.replace("[\n ", "[") + database_str = database_str.replace("{\n ", "{") + database_str = database_str.replace("\n ]", "]") + database_str = database_str.replace("\n }", "}") + database_str = database_str.replace("\n ", " ") + + # Add newline at the end of file + database_str += "\n" + + return database_str + + +def load_database(database_path: Path): + database = {} + if database_path.exists(): + with open(database_path, 'r') as f: + database = json.load(f) + else: + with open(database_path, 'w') as f: + print("File created:", database_path) + database = [] + return database + + +def save_database(database: dict, database_path: Path): + database_str = format_database(database) + with open(database_path, 'w') as f: + f.write(database_str) diff --git a/create_app_list/clone_repos.py b/create_app_list/clone_repos.py new file mode 100755 index 0000000..c99f9c1 --- /dev/null +++ b/create_app_list/clone_repos.py @@ -0,0 +1,77 @@ +import requests +import json +from pathlib import Path +import subprocess + + +base_url = "https://api.github.com" +org_name = 
"LedgerHQ" + +repos_endpoint = f"{base_url}/orgs/{org_name}/repos" + +params = { + "type": "public", + "archived": "false", + "sort": "full_name", + "page": 1, + "per_page": 100 +} + +headers = { + "Authorization": f"Bearer {access_token}", + "Accept": "application/vnd.github.v3+json" +} + +filtered_repos = [] + +def run_cmd(cmd: str, + cwd: Path, + print_output: bool = True, + no_throw: bool = False) -> str: + print(f"[run_cmd] Running: {cmd} from {cwd}") + + ret = subprocess.run(cmd, + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + cwd=cwd) + if no_throw is False and ret.returncode: + print(f"[run_cmd] Error {ret.returncode} raised while running cmd: {cmd}") + print("[run_cmd] Output was:") + print(ret.stdout) + raise ValueError() + + if print_output: + print(f"[run_cmd] Output:\n{ret.stdout}") + + return ret.stdout.strip() + + +def load_file(input_file: Path = "") -> str: + if input_file.exists(): + with open(input_file, 'r') as f: + return json.load(f) + else: + print(f"The file {input_file} does not exist") + exit() + + +def clone_repos_from_list(repo_file: Path = "", repo_list: str = "", workdir: Path = ""): + GIT_CONFIG = ' -c url."https://github.com/".insteadOf="git@github.com:" -c url."https://".insteadOf="git://"' + + if repo_file: + repo_json = load_file(repo_file) + else: + repo_json = repo_list + + for repo in repo_json: + if repo['url']: + if not Path.exists(workdir / repo['name']): + run_cmd(f" git {GIT_CONFIG} clone {repo['url']} --recurse-submodules", cwd=workdir) + print(f" git {GIT_CONFIG} clone {repo['url']}") + else: + run_cmd("git pull", cwd=workdir/repo['name']) + run_cmd("git submodule update --recursive", cwd=workdir/repo['name']) + print(f" git pull and update submodules {repo['url']}") + diff --git a/create_app_list/main.py b/create_app_list/main.py new file mode 100755 index 0000000..2ed62a2 --- /dev/null +++ b/create_app_list/main.py @@ -0,0 +1,69 @@ +from argparse import ArgumentParser 
+from pathlib import Path +import json + +from parse_github import parse_github +from clone_repos import clone_repos_from_list +import sys +sys.path.append('..') +from app_load_params_gen_db_full import get_variants +from app_load_params_utils import load_database, save_database + +import os + +REPO_OUTPUT_FILE = "repo_out.txt" +VARIANT_OUTPUT_FILE = "variant_out.txt" + + +def merge_db(json1: dict, json2: dict, merging_key: str) -> str: + merged_data = {} + + for obj in json1 + json2: + key_value = obj[merging_key] + if key_value not in merged_data: + merged_data[key_value] = obj + else: + merged_data[key_value].update(obj) + + #merged_json = json.dumps(list(merged_data.values())) + + return list(merged_data.values()) + + +if __name__ == "__main__": + parser = ArgumentParser() + + parser.add_argument("--workdir", required=False, type=Path, default="workdir") + parser.add_argument("--repo_file", required=False, type=Path) + parser.add_argument("--output_file", required=False, type=Path) + parser.add_argument("--no_update", required=False, action='store_true') + + args = parser.parse_args() + + if args.workdir and not os.path.exists(args.workdir): + os.makedirs(args.workdir) + print(f"Directory '{args.workdir}' created.") + + if not args.repo_file: + print("Parsing github") + repos_list = parse_github(output_file=REPO_OUTPUT_FILE) + + else: + repos_list = load_database(args.repo_file) + + if not args.no_update: + print("Cloning repo") + clone_repos_from_list(repo_list=repos_list, workdir=args.workdir) + + print("Gen variants") + variant_list = get_variants(input_list=repos_list, workdir=args.workdir, + output_file=VARIANT_OUTPUT_FILE) + + print("Merging") + full_output = merge_db(repos_list, variant_list, "name") + print(full_output) + + if args.output_file: + save_database(full_output, args.output_file), + save_database(repos_list, "t1"), + save_database(variant_list, "t2"), diff --git a/create_app_list/makefile_dump.py b/create_app_list/makefile_dump.py new file 
#!/usr/bin/env python3

"""
This is a pure duplicate of
https://github.com/LedgerHQ/ledger-app-workflows/blob/master/scripts/makefile_dump.py
This is to allow easily generating the db from the apps code.
"""

import subprocess
from argparse import ArgumentParser
from pathlib import Path
from typing import Tuple, List, Dict
from tempfile import NamedTemporaryFile
import json


# Extra Makefile target injected to dump the app build parameters.
LISTPARAMS_MAKEFILE = """
listparams:
\t@echo Start dumping params
\t@echo APP_LOAD_PARAMS=$(APP_LOAD_PARAMS)
\t@echo GLYPH_FILES=$(GLYPH_FILES)
\t@echo ICONNAME=$(ICONNAME)
\t@echo TARGET=$(TARGET)
\t@echo TARGET_NAME=$(TARGET_NAME)
\t@echo TARGET_ID=$(TARGET_ID)
\t@echo APPNAME=$(APPNAME)
\t@echo APPVERSION=$(APPVERSION)
\t@echo API_LEVEL=$(API_LEVEL)
\t@echo SDK_NAME=$(SDK_NAME)
\t@echo SDK_VERSION=$(SDK_VERSION)
\t@echo SDK_HASH=$(SDK_HASH)
\t@echo Stop dumping params
"""


def run_cmd(cmd: str,
            cwd: Path,
            print_output: bool = False,
            no_throw: bool = False) -> str:
    """Run a shell command in `cwd` and return its stripped stdout.

    Raises ValueError on a non-zero exit status unless `no_throw` is True.
    """
    print(f"[run_cmd] Running: {cmd} from {cwd}")

    ret = subprocess.run(cmd,
                         shell=True,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT,
                         universal_newlines=True,
                         cwd=cwd)
    if no_throw is False and ret.returncode:
        print(f"[run_cmd] Error {ret.returncode} raised while running cmd: {cmd}")
        print("[run_cmd] Output was:")
        print(ret.stdout)
        raise ValueError()

    if print_output:
        print(f"[run_cmd] Output:\n{ret.stdout}")

    return ret.stdout.strip()


def get_app_listvariants(app_build_path: Path,
                         sdk: str = "$NANOS_SDK",
                         allow_failure: bool = False) -> Tuple[str, List[str]]:
    """Return (variant_param_name, variants) via the `listvariants` Makefile target."""
    listvariants = run_cmd(f"make BOLOS_SDK={sdk} listvariants",
                           cwd=app_build_path, no_throw=allow_failure)
    if "VARIANTS" not in listvariants:
        raise ValueError(f"Invalid variants retrieved: {listvariants}")

    # Drop Makefile logs previous to the listvariants output.
    listvariants = listvariants.split("VARIANTS ")[1]
    listvariants = listvariants.split("\n")[0]

    variants = listvariants.split(" ")
    variant_param_name = variants.pop(0)
    assert variants, "At least one variant should be defined in the app Makefile"
    return variant_param_name, variants


def get_app_listparams(app_build_path: Path,
                       variant_param: str) -> Dict:
    """Dump the build params for one variant using the injected Makefile target."""
    with NamedTemporaryFile(suffix='.mk') as tmp:
        tmp_file = Path(tmp.name)

        with open(tmp_file, "w") as f:
            f.write(LISTPARAMS_MAKEFILE)

        ret = run_cmd(f"make -f Makefile -f {tmp_file} listparams {variant_param}",
                      cwd=app_build_path)

    # Keep only the part between the start/stop markers.
    ret = ret.split("Start dumping params\n")[1]
    ret = ret.split("\nStop dumping params")[0]

    listparams = {}
    for line in ret.split("\n"):
        if "=" not in line:
            continue

        if "APP_LOAD_PARAMS=" in line:
            app_load_params_str = line.replace("APP_LOAD_PARAMS=", "")
            listparams["APP_LOAD_PARAMS"] = app_load_params_str
        else:
            # Bug fix: values may themselves contain '=', which made the
            # 2-value unpacking raise; split on the first '=' only.
            key, value = line.split("=", 1)
            listparams[key] = value

    return listparams


def save_app_params(app_build_path: Path, json_path: Path) -> None:
    """Dump the params of every variant of an app to a JSON file."""

    # Retrieve available variants
    variant_param_name, variants = get_app_listvariants(app_build_path)

    ret = {
        "VARIANT_PARAM": variant_param_name,
        "VARIANTS": {}
    }

    for variant in variants:
        print(f"Checking for variant: {variant}")

        app_params = get_app_listparams(app_build_path,
                                        variant_param=f"{variant_param_name}={variant}")

        ret["VARIANTS"][variant] = app_params

    with open(json_path, "w") as f:
        json.dump(ret, f, indent=4)
", + required=True) + parser.add_argument("--json_path", + help="Json path to store the output", + required=True) + + args = parser.parse_args() + + save_app_params(args.app_build_path, args.json_path) diff --git a/create_app_list/parse_github.py b/create_app_list/parse_github.py new file mode 100755 index 0000000..c55c695 --- /dev/null +++ b/create_app_list/parse_github.py @@ -0,0 +1,56 @@ +import requests +import json +from pathlib import Path + +base_url = "https://api.github.com" +org_name = "LedgerHQ" + +repos_endpoint = f"{base_url}/orgs/{org_name}/repos" + +params = { + "type": "public", + "archived": "false", + "sort": "full_name", + "page": 1, + "per_page": 100 +} + +headers = { + "Authorization": f"Bearer {access_token}", + "Accept": "application/vnd.github.v3+json" +} + +repos = [] + + +def parse_github(output_file: Path = "") -> str: + while True: + response = requests.get(repos_endpoint, params=params, headers=headers) + repos_data = response.json() + if not repos_data: # No more repositories to fetch + break + for repo in repos_data: + if repo["archived"]: + continue + + if repo["name"].lower().startswith("app-"): + repo_name = repo["name"] + owner_name = repo["owner"]["login"] + repo_url = repo["html_url"] + if repo["fork"]: + parent_response = requests.get(repo["url"], + headers=headers) + parent_data = parent_response.json() + if "parent" in parent_data: + owner_name = parent_data["parent"]["owner"]["login"] + repos.append({"name": repo_name, "owner": owner_name, + "url": repo_url}) + + params["page"] += 1 + + if output_file: + # Write the filtered repository names to a JSON file + with open(output_file, "w") as f: + json.dump(repos, f, indent=4) + + return repos