Skip to content

Commit

Permalink
Merge pull request #92 from naved001/log-everything
Browse files Browse the repository at this point in the history
  • Loading branch information
larsks authored Dec 4, 2024
2 parents 616a196 + 40e1cb5 commit 174864a
Show file tree
Hide file tree
Showing 4 changed files with 27 additions and 9 deletions.
11 changes: 8 additions & 3 deletions openshift_metrics/merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
Merges metrics from files and produces reports by pod and by namespace
"""

import logging
import os
import argparse
from datetime import datetime, UTC
Expand All @@ -13,6 +14,9 @@
from openshift_metrics import utils, invoice
from openshift_metrics.metrics_processor import MetricsProcessor

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def compare_dates(date_str1, date_str2):
    """Returns true if date1 is earlier than date2"""
date1 = datetime.strptime(date_str1, "%Y-%m-%d")
Expand Down Expand Up @@ -96,14 +100,15 @@ def main():
elif compare_dates(report_end_date, metrics_from_file["end_date"]):
report_end_date = metrics_from_file["end_date"]

print(report_start_date)
print(report_end_date)
logger.info(f"Generating report from {report_start_date} to {report_end_date}")

report_start_date = datetime.strptime(report_start_date, "%Y-%m-%d")
report_end_date = datetime.strptime(report_end_date, "%Y-%m-%d")

report_month = datetime.strftime(report_start_date, "%Y-%m")

if args.use_nerc_rates:
logger.info("Using nerc rates.")
nerc_data = nerc_rates.load_from_url()
rates = invoice.Rates(
cpu=Decimal(nerc_data.get_value_at("CPU SU Rate", report_month)),
Expand All @@ -130,7 +135,7 @@ def main():
pod_report_file = f"Pod NERC OpenShift {report_month}.csv"

if report_start_date.month != report_end_date.month:
print("Warning: The report spans multiple months")
logger.warning("The report spans multiple months")
report_month += " to " + datetime.strftime(report_end_date, "%Y-%m")

condensed_metrics_dict = processor.condense_metrics(
Expand Down
9 changes: 7 additions & 2 deletions openshift_metrics/openshift_prometheus_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,15 @@
import os
import sys
import json
import logging

from openshift_metrics import utils
from openshift_metrics.prometheus_client import PrometheusClient
from openshift_metrics.metrics_processor import MetricsProcessor

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

CPU_REQUEST = 'kube_pod_resource_request{unit="cores"} unless on(pod, namespace) kube_pod_status_unschedulable'
MEMORY_REQUEST = 'kube_pod_resource_request{unit="bytes"} unless on(pod, namespace) kube_pod_status_unschedulable'
GPU_REQUEST = 'kube_pod_resource_request{resource=~"nvidia.com.*"} unless on(pod, namespace) kube_pod_status_unschedulable'
Expand Down Expand Up @@ -71,8 +75,7 @@ def main():
else:
output_file = f"metrics-{report_start_date}-to-{report_end_date}.json"

print(f"Generating report starting {report_start_date} and ending {report_end_date} in {output_file}")

logger.info(f"Generating report starting {report_start_date} and ending {report_end_date} in {output_file}")

token = os.environ.get("OPENSHIFT_TOKEN")
prom_client = PrometheusClient(openshift_url, token)
Expand All @@ -98,6 +101,7 @@ def main():
node_labels = prom_client.query_metric(KUBE_NODE_LABELS, report_start_date, report_end_date)
metrics_dict["gpu_metrics"] = MetricsProcessor.insert_node_labels(node_labels, gpu_request_metrics)
except utils.EmptyResultError:
logger.info(f"No GPU metrics found for the period {report_start_date} to {report_end_date}")
pass

month_year = datetime.strptime(report_start_date, "%Y-%m-%d").strftime("%Y-%m")
Expand All @@ -108,6 +112,7 @@ def main():
s3_location = f"data_{month_year}/metrics-{report_start_date}-to-{report_end_date}.json"

with open(output_file, "w") as file:
logger.info(f"Writing metrics to {output_file}")
json.dump(metrics_dict, file)

if args.upload_to_s3:
Expand Down
8 changes: 6 additions & 2 deletions openshift_metrics/prometheus_client.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,14 @@
import requests
import time
import logging

from urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
from openshift_metrics.utils import EmptyResultError

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class PrometheusClient:
def __init__(self, prometheus_url: str, token: str, step_min: int=15):
self.prometheus_url = prometheus_url
Expand All @@ -22,7 +26,7 @@ def query_metric(self, metric, start_date, end_date):
session = requests.Session()
session.mount("https://", HTTPAdapter(max_retries=retries))

print(f"Retrieving metric: {metric}")
logger.info(f"Retrieving metric: {metric}")

for _ in range(3):
response = session.get(url, headers=headers, verify=True)
Expand All @@ -33,7 +37,7 @@ def query_metric(self, metric, start_date, end_date):
data = response.json()["data"]["result"]
if data:
break
print("Empty result set")
logger.warning("Empty result set")
time.sleep(3)

if not data:
Expand Down
8 changes: 6 additions & 2 deletions openshift_metrics/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,14 @@
import csv
import requests
import boto3
import logging

from openshift_metrics import invoice
from decimal import Decimal

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class EmptyResultError(Exception):
"""Raise when no results are retrieved for a query"""
Expand Down Expand Up @@ -69,7 +73,7 @@ def upload_to_s3(file, bucket, location):
aws_access_key_id=s3_key_id,
aws_secret_access_key=s3_secret,
)

logger.info(f"Uploading {file} to s3://{bucket}/{location}")
response = s3.upload_file(file, Bucket=bucket, Key=location)


Expand Down Expand Up @@ -104,7 +108,7 @@ def get_namespace_attributes():

def csv_writer(rows, file_name):
"""Writes rows as csv to file_name"""
print(f"Writing csv to {file_name}")
logger.info(f"Writing report to {file_name}")
with open(file_name, "w") as csvfile:
csvwriter = csv.writer(csvfile)
csvwriter.writerows(rows)
Expand Down

0 comments on commit 174864a

Please sign in to comment.