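"""Archive or un-archive conda-forge feedstock repositories.

Feedstock names are read from the request files in the ``archive/`` and
``unarchive/`` directories (one package name per line, without the
``-feedstock`` suffix). Each repository's ``archived`` flag is toggled via
the GitHub REST API using the ``GITHUB_TOKEN`` from the environment.
"""
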
import glob
import os
import subprocess
import sys

import requests


def get_task_files(task):
    # Return all request files in the task directory, skipping the example file.
    exf = os.path.join(task, "example.txt")
    return [
        f for f in glob.glob(os.path.join(task, "*"))
        if f != exf
    ]


def raise_json_for_status(request):
    # Re-raise HTTP errors with the response's JSON body attached to the
    # exception args so failures are easier to debug.
    try:
        request.raise_for_status()
    except Exception as exc:
        exc.args = exc.args + (request.json(),)
        raise exc.with_traceback(exc.__traceback__)


def process_repo(repo, task):
    # Archive or un-archive a single feedstock repository via the GitHub REST API.
    owner = "conda-forge"
    headers = {
        "X-GitHub-Api-Version": "2022-11-28",
        "Accept": "application/vnd.github+json",
        "User-Agent": "conda-forge/admin-requests",
        "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}",
    }

    # Look up the repository's current state.
    r = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}",
        headers=headers,
    )
    raise_json_for_status(r)

    if task == "archive":
        target_status = "archived"
    else:
        target_status = "unarchived"

    # Nothing to do if the repository is already in the target state.
    data = r.json()
    if task == "archive" and data["archived"]:
        print("feedstock %s is already %s" % (repo, target_status), flush=True)
        return
    if task == "unarchive" and not data["archived"]:
        print("feedstock %s is already %s" % (repo, target_status), flush=True)
        return

    # Flip the archived flag.
    r = requests.patch(
        f"https://api.github.com/repos/{owner}/{repo}",
        headers=headers,
        json={"archived": task == "archive"},
    )
    raise_json_for_status(r)
    print("feedstock %s was %s" % (repo, target_status), flush=True)


def process_feedstocks_in_file(task_file, task):
    pkgs_to_do_again = []

    with open(task_file, "r") as fp:
        for line in fp:
            line = line.strip()
            # Skip comments and blank lines.
            if line.startswith("#") or len(line) == 0:
                continue
            try:
                process_repo(line + "-feedstock", task)
            except Exception as e:
                print(
                    "failed to %s '%s': %s" % (task, line, repr(e)),
                    flush=True,
                )
                pkgs_to_do_again.append(line)

    if pkgs_to_do_again:
        # Rewrite the task file with only the failed feedstocks so they are
        # retried later, and keep the file in the repository.
        with open(task_file, "w") as fp:
            fp.write(
                "# %s failed for these feedstocks - "
                "trying again later\n" % task
            )
            for pkg in pkgs_to_do_again:
                fp.write(pkg + "\n")
        subprocess.check_call(f"git add {task_file}", shell=True)
        subprocess.check_call(
            f"git commit --allow-empty -m 'Keeping {task_file} "
            f"after failed {task}'",
            shell=True,
        )
    else:
        # Everything succeeded, so remove the task file from the repository.
        subprocess.check_call(f"git rm {task_file}", shell=True)
        subprocess.check_call(
            f"git commit -m 'Remove {task_file} after {task}'",
            shell=True,
        )

    # Show the commit that was just created.
    subprocess.check_call("git show", shell=True)


def check_for_feedstocks_in_file(task_file):
    # Return the entries in the task file whose feedstock repository does not
    # exist on GitHub (non-200 response).
    missing_feedstocks = []
    with open(task_file, "r") as fp:
        for line in fp:
            line = line.strip()
            if line.startswith("#") or len(line) == 0:
                continue
            r = requests.get(
                "https://github.com/conda-forge/%s-feedstock" % line
            )
            if r.status_code != 200:
                missing_feedstocks.append(line)
    return missing_feedstocks


def main(*, check_only):
    missing_feedstocks = []

    for task in ("archive", "unarchive"):
        task_files = get_task_files(task)
        for task_file in task_files:
            print("working on file %s" % task_file, flush=True)
            if check_only:
                missing_feedstocks.extend(
                    check_for_feedstocks_in_file(task_file)
                )
            else:
                process_feedstocks_in_file(task_file, task)

    if missing_feedstocks:
        raise RuntimeError(
            "feedstocks %s could not be found!" % list(set(missing_feedstocks))
        )


if __name__ == "__main__":
    if len(sys.argv) != 2:
        sys.exit("Usage: python archive_feedstock.py [check | process]")
    # Any argument other than "check" processes the request files.
    check_only = sys.argv[1] == "check"
    main(check_only=check_only)
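
# A minimal usage sketch (the request file name below is hypothetical, and the
# real CI wiring in conda-forge/admin-requests may differ):
#
#   $ cat archive/some-request.txt
#   # one package name per line, without the -feedstock suffix
#   some-old-package
#
#   $ export GITHUB_TOKEN=...                # token allowed to archive repos
#   $ python archive_feedstock.py check      # only verify the feedstocks exist
#   $ python archive_feedstock.py process    # archive/unarchive and commit the result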