diff --git a/api/urls.py b/api/urls.py
index 72bd3aa..52e8252 100644
--- a/api/urls.py
+++ b/api/urls.py
@@ -13,5 +13,8 @@
     re_path('test', views.test),
     re_path('rpms_images_fetcher', views.rpms_images_fetcher_view),
     re_path('login', views.login_view, name='login'),
-    re_path('check_auth', views.check_auth, name='check_auth')
+    re_path('check_auth', views.check_auth, name='check_auth'),
+    re_path('advisory_activites', views.get_advisory_activities, name='advisory_activities'),
+    re_path('release_schedule', views.get_release_schedule, name='release_schedule'),
+    re_path('release_status', views.get_release_status, name='release_status')
 ]
diff --git a/api/views.py b/api/views.py
index 943d266..bc8ada7 100644
--- a/api/views.py
+++ b/api/views.py
@@ -11,12 +11,17 @@
 from . import request_dispatcher
 from .serializer import BuildSerializer
 import django_filters
+import requests
+import base64
+import yaml
 import json
 import re
 import os
 import jwt
 from datetime import datetime, timedelta
 from build_interface.settings import SECRET_KEY, SESSION_COOKIE_DOMAIN, JWTAuthentication
+from django.http import JsonResponse
+from lib.errata.errata_requests import get_advisory_status_activities, get_advisory_schedule, get_feature_freeze_schedule, get_ga_schedule
 
 
 class BuildDataFilter(django_filters.FilterSet):
@@ -169,6 +174,87 @@ def branch_data(request):
     return response
 
 
+@api_view(["GET"])
+def get_advisory_activities(request):
+    request_id = request.query_params.get("advisory", None)
+
+    if request_id is None:
+        return JsonResponse({"status": "error", "message": "Missing \"advisory\" param in the URL."})
+    else:
+        return JsonResponse(get_advisory_status_activities(request_id))
+
+
+@api_view(["GET"])
+def get_release_schedule(request):
+    request_type = request.query_params.get("type", None)
+    branch_version = request.query_params.get("branch_version", None)
+
+    if request_type is None:
+        return JsonResponse({"status": "error", "message": "Missing \"type\" param in the URL."})
+    if request_type not in ["ga", "release", "feature_freeze"]:
+        return JsonResponse({"status": "error", "message": "Invalid \"type\" param in the URL. It should be one of: ga, release, feature_freeze."})
+    if branch_version is None:
+        return JsonResponse({"status": "error", "message": "Missing \"branch_version\" param in the URL."})
+    if request_type == "ga":
+        return JsonResponse(get_ga_schedule(branch_version), safe=False)
+    elif request_type == "release":
+        return JsonResponse(get_advisory_schedule(branch_version), safe=False)
+    elif request_type == "feature_freeze":
+        return JsonResponse(get_feature_freeze_schedule(branch_version), safe=False)
+
+
+shipped_advisory = []
+
+
+@api_view(["GET"])
+def get_release_status(request):
+    ga_version = get_ga_version()
+    major, minor = int(ga_version.split('.')[0]), int(ga_version.split('.')[1])
+    status = {"message": [], "alert": [], "unshipped": []}
+    headers = {"Authorization": f"token {os.environ['GITHUB_PERSONAL_ACCESS_TOKEN']}"}
+    for r in range(0, 4):
+        version = minor - r
+        advisory_schedule = get_advisory_schedule(f"{major}.{version}")['all_ga_tasks']
+        for release in advisory_schedule:
+            if datetime.strptime(release['date_finish'], "%Y-%m-%d") < datetime.now():
+                release_date, release_name = release['date_finish'], release['name']
+            else:
+                break
+        if "GA" in release_name:
+            assembly = re.search(r'\d+\.\d+', release_name).group() + ".0"
+        else:
+            assembly = re.search(r'\d+\.\d+\.\d+', release_name).group()
+        status['message'].append({"release": f"{major}.{version}", "status": f"{assembly} release date is {release_date} and {release['name']} release date is {release['date_finish']}"})
+        res = requests.get(f"https://api.github.com/repos/openshift/ocp-build-data/contents/releases.yml?ref=openshift-{major}.{version}", headers=headers)
+        release_assembly = yaml.safe_load(base64.b64decode(res.json()['content']))['releases'][assembly]['assembly']
+        if "group" in release_assembly.keys():
+            if 'advisories!' in release_assembly['group'].keys():
+                advisories = release_assembly['group']['advisories!']
+            elif 'advisories' in release_assembly['group'].keys():
+                advisories = release_assembly['group']['advisories']
+        else:
+            advisories = {}
+        for ad in advisories:
+            if datetime.strptime(release_date, "%Y-%m-%d").strftime("%Y-%m-%d") == datetime.now().strftime("%Y-%m-%d"):
+                if advisories[ad] in shipped_advisory:
+                    status['alert'].append({"release": f"{major}.{version}", "status": f"{assembly} advisory is shipped live"})
+                else:
+                    errata_activity = get_advisory_status_activities(advisories[ad])['data']
+                    if len(errata_activity) > 0:
+                        errata_state = errata_activity[-1]['attributes']['added']
+                    else:
+                        errata_state = "NEW_FILES"
+                    if errata_state == "SHIPPED_LIVE":
+                        shipped_advisory.append(advisories[ad])
+                        status['alert'].append({"release": f"{major}.{version}", "status": f"{assembly} advisory is shipped live"})
+                    elif errata_state == "DROPPED_NO_SHIP":
+                        status['alert'].append({"release": f"{major}.{version}", "status": f"{assembly} advisory is dropped"})
+                    else:
+                        status['alert'].append({"release": f"{major}.{version}", "status": f"{assembly} advisory is {errata_state}, release date is today"})
+                        status['unshipped'].append({"advisory": advisories[ad], "note": f"{assembly} {ad} advisory"})
+    return JsonResponse(status)
+
+
 @api_view(["GET"])
 def test(request):
     return Response({
diff --git a/cron_tasks/Dockerfile b/cron_tasks/Dockerfile
new file mode 100644
index 0000000..7a20fe2
--- /dev/null
+++ b/cron_tasks/Dockerfile
@@ -0,0 +1,3 @@
+FROM registry.redhat.io/ubi9/ubi-minimal:latest
+RUN microdnf install -y python pip && python3 -m pip install --upgrade pip && pip3 install slack_sdk requests
+COPY check_schedule.py /check_schedule.py
diff --git a/cron_tasks/check_schedule.py b/cron_tasks/check_schedule.py
new file mode 100644
index 0000000..d91a1b8
--- /dev/null
+++ b/cron_tasks/check_schedule.py
@@ -0,0 +1,42 @@
+import os
+import requests
+from typing import Optional
+import time
+from slack_sdk import WebClient
+
+slack_token = os.environ.get('SLACK_TOKEN', None)
+API_ENDPOINT = "https://art-dash-server-art-dashboard-server.apps.artc2023.pc3z.p1.openshiftapps.com/api/v1"
+
+
+def post_slack_message(message: str, thread_ts: Optional[str] = None):
+    response = WebClient(token=slack_token).chat_postMessage(
+        channel="#forum-ocp-release",
+        text=message,
+        thread_ts=thread_ts, username="art-release-bot", link_names=True, attachments=[], icon_emoji=":dancing_robot:", reply_broadcast=False)
+    return response
+
+
+release_status = requests.get(f"{API_ENDPOINT}/release_status").json()
+if release_status['alert'] != []:
+    response = post_slack_message(' \n'.join([msg['status'] for msg in release_status['alert']]))
+    print(f"message posted in https://redhat-internal.slack.com/archives/{response['channel']}/p{response['ts'].replace('.', '')}")
+    if release_status['unshipped'] != []:
+        post_slack_message("start monitoring advisories not in shipped live status, interval set to 1 hour ...", thread_ts=response['ts'])
+        while release_status['unshipped'] != []:
+            for item in list(release_status['unshipped']):
+                # check the advisory's current status
+                advisory_status_response = requests.get(f"{API_ENDPOINT}/advisory_activites/?advisory={item['advisory']}").json()
+                errata_activity = advisory_status_response['data']
+                if len(errata_activity) > 0:
+                    advisory_status = errata_activity[-1]['attributes']['added']
+                else:
+                    advisory_status = "NEW_FILES"
+                if advisory_status in ("SHIPPED_LIVE", "DROPPED_NO_SHIP"):
+                    release_status['unshipped'].remove(item)
+                    post_slack_message(f"{item['note']} status changed to {advisory_status}", thread_ts=response['ts'])
+            # sleep 1 hour before the next poll
+            print(f"sleeping 1 hour due to {release_status['unshipped']}")
+            time.sleep(3600)
+        post_slack_message("All advisories are now shipped live or dropped, stopping monitoring", thread_ts=response['ts'])
+else:
+    print("No alert", [msg['status'] for msg in release_status['message']])
diff --git a/lib/errata/errata_requests.py b/lib/errata/errata_requests.py
index a272d85..9e021e1 100644
--- a/lib/errata/errata_requests.py
+++ b/lib/errata/errata_requests.py
@@ -7,6 +7,8 @@
 from requests_gssapi import HTTPSPNEGOAuth
 import ssl
 
+PP_SERVER = "https://pp.engineering.redhat.com/api/v7/releases"
+
 
 @update_keytab
 def get_advisory_data(advisory_id):
@@ -54,6 +56,41 @@ def get_user_data(user_id):
     return None
 
 
+def catch_request_result(url):
+    try:
+        print(url)
+        response = requests.get(url, verify=ssl.get_default_verify_paths().openssl_cafile, auth=HTTPSPNEGOAuth(), headers={'Content-Type': 'application/json'})
+        return json.loads(response.text)
+    except Exception as e:
+        print(e)
+        raise e
+
+
+@update_keytab
+def get_advisory_status_activities(advisory_id):
+    """
+    This method returns advisory activities for a given id.
+    :param advisory_id: The id of the advisory to get data for.
+    :return: Dict, advisory data.
+    """
+    return catch_request_result(f"https://errata.devel.redhat.com/api/v1/erratum/{advisory_id}/activities?filter[what]=status")
+
+
+@update_keytab
+def get_advisory_schedule(branch_version):
+    return catch_request_result(f"{PP_SERVER}/openshift-{branch_version}.z/?fields=all_ga_tasks")
+
+
+@update_keytab
+def get_feature_freeze_schedule(branch_version):
+    return catch_request_result(f"{PP_SERVER}/openshift-{branch_version}/schedule-tasks/?name__regex=Feature+Development+for")
+
+
+@update_keytab
+def get_ga_schedule(branch_version):
+    return catch_request_result(f"{PP_SERVER}/openshift-{branch_version}/schedule-tasks/?name=OpenShift+Container+Platform+GA+Release+Schedule")
+
+
 def format_user_data(user_data):
     return user_data
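
Quick smoke test for the new endpoints (a minimal sketch, not part of the change above): the host is the API_ENDPOINT already hard-coded in cron_tasks/check_schedule.py, while the branch version "4.14" and advisory ID 12345 are placeholder values.

    import requests

    API_ENDPOINT = "https://art-dash-server-art-dashboard-server.apps.artc2023.pc3z.p1.openshiftapps.com/api/v1"

    # GA schedule for a branch; "release" and "feature_freeze" are the other accepted types
    schedule = requests.get(f"{API_ENDPOINT}/release_schedule/?type=ga&branch_version=4.14").json()

    # Status-change activities for a single advisory (route spelling matches api/urls.py)
    activities = requests.get(f"{API_ENDPOINT}/advisory_activites/?advisory=12345").json()

    # Aggregate payload consumed by cron_tasks/check_schedule.py:
    # {"message": [{"release": ..., "status": ...}], "alert": [...], "unshipped": [{"advisory": ..., "note": ...}]}
    status = requests.get(f"{API_ENDPOINT}/release_status").json()
    print(status["alert"], status["unshipped"])

The cron_tasks image only needs SLACK_TOKEN set at runtime; the script is copied to /check_schedule.py and polls advisory_activites hourly until every advisory listed under "unshipped" reaches SHIPPED_LIVE or DROPPED_NO_SHIP.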