From 9592b4569b5df9a1963d2ccf560ca04fde9a9eda Mon Sep 17 00:00:00 2001
From: Pieter De Gendt
Date: Wed, 16 Oct 2024 10:58:38 +0200
Subject: [PATCH] app: Update Forall command to allow multiple concurrent
 processes

Demonstrate asynchronous behavior for the Forall command and add an
argument to select the number of jobs.

Signed-off-by: Pieter De Gendt
---
 src/west/app/project.py | 40 ++++++++++++++++++++++++++--------------
 1 file changed, 26 insertions(+), 14 deletions(-)

diff --git a/src/west/app/project.py b/src/west/app/project.py
index f3caf66c..e38a43eb 100644
--- a/src/west/app/project.py
+++ b/src/west/app/project.py
@@ -6,6 +6,7 @@
 '''West project commands'''
 
 import argparse
+import asyncio
 from functools import partial
 import logging
 import os
@@ -1670,16 +1671,15 @@ def do_add_parser(self, parser_adder):
         parser.add_argument('projects', metavar='PROJECT', nargs='*',
                             help='''projects (by name or path) to operate on;
                             defaults to active cloned projects''')
+        parser.add_argument('-j', '--jobs', nargs='?', const=-1,
+                            default=1, type=int, action='store',
+                            help='''Use multiple jobs to parallelize commands.
+                            Pass no number or -1 to run commands on all cores.''')
         return parser
 
-    def do_run(self, args, user_args):
-        failed = []
-        group_set = set(args.groups)
-        env = os.environ.copy()
-        for project in self._cloned_projects(args, only_active=not args.all):
-            if group_set and not group_set.intersection(set(project.groups)):
-                continue
-
+    async def run_for_project(self, project, args, sem):
+        async with sem:
+            env = os.environ.copy()
             env["WEST_PROJECT_NAME"] = project.name
             env["WEST_PROJECT_PATH"] = project.path
             env["WEST_PROJECT_ABSPATH"] = project.abspath if project.abspath else ''
@@ -1689,12 +1689,24 @@ def do_run(self, args, user_args):
 
             cwd = args.cwd if args.cwd else project.abspath
 
-            self.banner(
-                f'running "{args.subcommand}" in {project.name_and_path}:')
-            rc = subprocess.Popen(args.subcommand, shell=True, env=env,
-                                  cwd=cwd).wait()
-            if rc:
-                failed.append(project)
+            self.banner(f'running "{args.subcommand}" in {project.name_and_path}:')
+            proc = await asyncio.create_subprocess_shell(args.subcommand,
+                                                         cwd=cwd, env=env, shell=True)
+            return await proc.wait()
+
+    def do_run(self, args, unknown):
+        asyncio.run(self.do_run_async(args, unknown))
+
+    async def do_run_async(self, args, user_args):
+        sem = asyncio.Semaphore(args.jobs if args.jobs > 0 else os.cpu_count() or sys.maxsize)
+
+        group_set = set(args.groups)
+        projects = [p for p in self._cloned_projects(args, only_active=not args.all)
+                    if not group_set or group_set.intersection(set(p.groups))]
+
+        rcs = await asyncio.gather(*[self.run_for_project(p, args, sem) for p in projects])
+
+        failed = [p for (p, rc) in zip(projects, rcs) if rc]
         self._handle_failed(args, failed)
 
     GREP_EPILOG = '''
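
Not part of the patch itself: a minimal, self-contained sketch of the concurrency pattern the diff applies, namely capping simultaneous asyncio.create_subprocess_shell calls with an asyncio.Semaphore and collecting the exit codes with asyncio.gather. The commands and job count below are invented for illustration only.

    # Sketch of the semaphore-plus-gather pattern used in the patch.
    import asyncio
    import os

    async def run_one(cmd: str, sem: asyncio.Semaphore) -> int:
        async with sem:
            # Only `jobs` commands run at once; the rest wait on the semaphore.
            proc = await asyncio.create_subprocess_shell(cmd)
            return await proc.wait()

    async def run_all(cmds: list[str], jobs: int) -> list[str]:
        # jobs <= 0 means "one job per core", mirroring the -j/--jobs handling.
        sem = asyncio.Semaphore(jobs if jobs > 0 else os.cpu_count() or 1)
        rcs = await asyncio.gather(*(run_one(c, sem) for c in cmds))
        # Return the commands that exited nonzero, analogous to the patch's
        # `failed` list handed to _handle_failed().
        return [c for c, rc in zip(cmds, rcs) if rc]

    if __name__ == '__main__':
        print(asyncio.run(run_all(['echo one', 'echo two', 'false'], jobs=2)))

With the patch applied, the new option would be combined with forall's existing -c option, for example `west forall -c "git status" -j 4` to run at most four projects at a time, or `west forall -c "git fetch" -j` with no number to use one job per core (the const=-1 case in the help text).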