This repository has been archived by the owner on Jan 29, 2024. It is now read-only.

adds gh pr to update_info #300

Merged: 2 commits, Dec 4, 2023. The diff below shows changes from 1 commit.
tools/update_info.py: 100 changes (72 additions & 28 deletions)
@@ -10,44 +10,57 @@
import itertools
from collections import defaultdict

with open('cookiecutter.json') as f:
with open("cookiecutter.json") as f:
data = json.load(f)

db_resources = data['__prompts__']['db_resource'].items()
web_frameworks = data['__prompts__']['project_backend'].items()
deployment_hosts = data['__prompts__']['project_host'].items()
db_resources = data["__prompts__"]["db_resource"].items()
web_frameworks = [x for x in data["__prompts__"]["project_backend"].items() if x[0] == "fastapi"]
deployment_hosts = data["__prompts__"]["project_host"].items()


combos = list(itertools.product(web_frameworks, db_resources, deployment_hosts))
random_cc_folder_prefix = ''.join([str(chr(random.randint(65, 90))) for _ in range(20)])
random_cc_folder_prefix = "".join([str(chr(random.randint(65, 90))) for _ in range(20)])


def get_azure_combinations() -> Generator[tuple[str, str], None, None]:
"""Returns the base_keys and base_values for the combinations"""
for framework, db_resource, host in combos:
base_keys= (f"azure-{framework[0]}/{db_resource[0]}/{host[0]}").lower().replace('/','-')
base_keys = (
(f"azure-{framework[0]}/{db_resource[0]}/{host[0]}")
.lower()
.replace("/", "-")
)

if re.findall('__prompt', base_keys):
if re.findall("__prompt", base_keys):
continue
base_values = f"azure-{framework[0]}-{db_resource[0]}-{host[0]}"
yield base_keys, base_values


app = typer.Typer()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.FileHandler("update_info.log"))

@app.command()
def metadata_list():
metadata_dict = defaultdict(list)

for framework, db_resource, host in combos:
base_keys= (f"azure-{framework[0]}/{db_resource[0]}/{host[0]}").lower().replace('/','-')
if re.findall('__prompt', base_keys):
base_keys = (
(f"azure-{framework[0]}/{db_resource[0]}/{host[0]}")
.lower()
.replace("/", "-")
)
if re.findall("__prompt", base_keys):
continue
base_values = f"azure-{framework[0]}-{db_resource[0]}-{host[0]}"
metadata_dict[framework[0]].append(base_values)

with open("metadata.json", "w") as outfile:
json.dump(metadata_dict, outfile, indent=4)


@app.command()
def update_all_repos():
"""
@@ -59,34 +72,54 @@ def update_all_repos():
- Creates pull request for the updates
"""

logging.debug("Creating Placeholder Folder")
logger.debug("Creating Placeholder Folder")
base_path = pathlib.Path(f"update_repos/{random_cc_folder_prefix}")
base_path.mkdir(parents=True, exist_ok=True)

logging.debug("Cloning Repos and Running Cruft")
logger.debug("Cloning Repos and Running Cruft")
for base_key, base_values in get_azure_combinations():
base_file = base_key.replace('-', '_')
url = (f"[email protected]:Azure-Samples/{base_key}.git")
cmd = ["git", "clone", url, base_file]
base_file = base_key.replace("-", "_")
url = f"[email protected]:Azure-Samples/{base_key}.git"
cmd = ["git", "clone", url, base_file]
path = base_path.joinpath(base_file)

try:
subprocess.check_output(
cmd,
text=True,
cwd=base_path,
)

except subprocess.CalledProcessError as e:
logging.error(f"Failed to clone {url}: {e}")
logger.warning(f"Could not clone {url}: {e}. This is likely a non-existent repo.")
continue

branch_name = "cruft/update"
subprocess.check_output(
["git", "checkout", "-b", random_cc_folder_prefix],
["git", "checkout", "-b", branch_name],
text=True,
cwd=path,
)
cruft.update(path, skip_apply_ask=True)

cruft.update(
path, skip_apply_ask=True, extra_context={"python_version": "3.12"}
)

if not subprocess.check_output(
["git", "status", "--porcelain"],
text=True,
cwd=path,
):
logger.info(f"No changes for {base_file}, skipping.")
continue

if list(path.glob("**/*.rej")):
logger.error(f"Rejection files found for {base_file}!")
exit(1)

subprocess.check_output(
["git", "add", "."],
text=True,
@@ -98,7 +131,14 @@ def update_all_repos():
cwd=path,
)
subprocess.check_output(
["git", "push", "--set-upstream", "origin", random_cc_folder_prefix],
["git", "push", "--set-upstream", "origin", branch_name],
text=True,
cwd=path,
)
subprocess.check_output(
["gh", "pr", "create", "--fill", "--reviewer", "kjaymiller,pamelafox"],
text=True,
cwd=path,
)


@@ -107,12 +147,16 @@ def update_readme():
web_framework_values = defaultdict(list)

for framework, db_resource, host in combos:
base_keys= (f"azure-{framework[0]}/{db_resource[0]}/{host[0]}").lower().replace('/','-')
if re.findall('__prompt', base_keys):
base_keys = (
(f"azure-{framework[0]}/{db_resource[0]}/{host[0]}")
.lower()
.replace("/", "-")
)
if re.findall("__prompt", base_keys):
continue
base_values = (f"{framework[1]} {db_resource[1]} {host[1]}")
url = (f"https://github.com/Azure-Samples/{base_keys}")
md_link = (re.sub(r"\[\w+\].*\[\/\w+\]", "", f"- [{base_values}]({url})"))
base_values = f"{framework[1]} {db_resource[1]} {host[1]}"
url = f"https://github.com/Azure-Samples/{base_keys}"
md_link = re.sub(r"\[\w+\].*\[\/\w+\]", "", f"- [{base_values}]({url})")
web_framework_values[framework[0]].append(md_link)

for key, value in web_framework_values.items():
@@ -122,8 +166,8 @@ def update_readme():
print(item)
print("\n")


print(f"{len(list(combos))}: Total Combinations")


if __name__ == "__main__":
app()
@@ -5,7 +5,7 @@ on:
push:
branches:
- main
- cruft/update*
- cruft/update

# GitHub Actions workflow to deploy to Azure using azd
# To configure required secrets for connecting to Azure, simply run `azd pipeline config`
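
Taken together, the changes push every cruft update to a fixed cruft/update branch, finish each repository update by opening a pull request with the GitHub CLI, and key the GitHub Actions deployment workflow to that same branch name. Below is a condensed, hedged sketch of the per-repository loop, not the exact script: the helper name update_one_repo and the commit message are illustrative, while the clone URL pattern, branch name, cruft.update() call, and gh pr create flags are taken from the diff (gh's --fill flag reuses commit information for the PR title and body).

import pathlib
import subprocess

import cruft


def update_one_repo(base_key: str, workdir: pathlib.Path) -> None:
    """Clone one sample repo, apply the cruft update, and open a PR for it."""
    repo_dir = workdir / base_key.replace("-", "_")
    url = f"[email protected]:Azure-Samples/{base_key}.git"

    # Clone the sample repo; the real script logs a warning and skips repos that do not exist.
    subprocess.check_output(["git", "clone", url, repo_dir.name], text=True, cwd=workdir)

    # Work on a fixed branch so the deployment workflow trigger (cruft/update) matches.
    subprocess.check_output(["git", "checkout", "-b", "cruft/update"], text=True, cwd=repo_dir)

    # Apply the cookiecutter template update, pinning the Python version.
    cruft.update(repo_dir, skip_apply_ask=True, extra_context={"python_version": "3.12"})

    # Nothing changed? Then there is nothing to commit or open a PR for.
    if not subprocess.check_output(["git", "status", "--porcelain"], text=True, cwd=repo_dir):
        return

    # Stop if cruft left any .rej conflict files behind.
    if list(repo_dir.glob("**/*.rej")):
        raise RuntimeError(f"Rejection files found in {repo_dir}")

    subprocess.check_output(["git", "add", "."], text=True, cwd=repo_dir)
    # Commit message is illustrative; the actual message sits in an unexpanded part of the diff.
    subprocess.check_output(
        ["git", "commit", "-m", "Update from cookiecutter template"], text=True, cwd=repo_dir
    )
    subprocess.check_output(
        ["git", "push", "--set-upstream", "origin", "cruft/update"], text=True, cwd=repo_dir
    )

    # New in this PR: open the pull request directly with the GitHub CLI.
    # --fill reuses commit information for the PR title and body.
    subprocess.check_output(
        ["gh", "pr", "create", "--fill", "--reviewer", "kjaymiller,pamelafox"],
        text=True,
        cwd=repo_dir,
    )

Using a fixed branch name instead of the earlier random folder prefix makes the workflow trigger predictable and easy to match in CI.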