Skip to content

Commit

Permalink
bug fixes for submission data passthrough and docker build, added base resource page frame
Browse files Browse the repository at this point in the history

Signed-off-by: Duncan Ragsdale <[email protected]>
  • Loading branch information
Thistleman committed Jul 30, 2024
1 parent 18e58b2 commit 0bd115c
Show file tree
Hide file tree
Showing 7 changed files with 113 additions and 48 deletions.
28 changes: 28 additions & 0 deletions valhub/submissions/migrations/0008_auto_20240730_1750.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Generated by Django 3.2.16 on 2024-07-30 17:50

import django.contrib.postgres.fields
from django.db import migrations, models


class Migration(migrations.Migration):
    """Remove ``Submission.mae`` and convert ``data_requirements`` to a
    PostgreSQL array field.

    Auto-generated by Django 3.2.16; paired with the matching model change
    in ``valhub/submissions/models.py``.
    """

    dependencies = [
        ("submissions", "0007_alter_submission_result"),
    ]

    operations = [
        # Drop the ``mae`` column from Submission.
        migrations.RemoveField(
            model_name="submission",
            name="mae",
        ),
        # Replace the previous free-text column with a native Postgres
        # array of short strings, so values no longer need to be stored
        # as a JSON-encoded string and parsed by the serializer.
        migrations.AlterField(
            model_name="submission",
            name="data_requirements",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.CharField(max_length=100),
                blank=True,
                default=list,  # callable default avoids a shared mutable list
                size=None,  # unbounded array length
            ),
        ),
    ]
5 changes: 4 additions & 1 deletion valhub/submissions/models.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from django.db import models
from django.contrib.postgres.fields import ArrayField

from analyses.models import Analysis
from base.utils import RandomFileName
Expand Down Expand Up @@ -47,7 +48,9 @@ class Submission(models.Model):
alt_name = models.TextField(null=True, blank=True, default="")
# mrt - mean run time
mrt = models.FloatField(null=True, blank=True)
data_requirements = models.TextField(null=True, blank=True)
data_requirements = ArrayField(
models.CharField(max_length=100), blank=True, default=list
)
archived = models.BooleanField(default=False)
python_version = models.DecimalField(
max_digits=4,
Expand Down
49 changes: 31 additions & 18 deletions valhub/submissions/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,22 +135,35 @@ def to_representation(self, instance):
"username": instance.created_by.username,
}

# Attempt to load the stringified array from the text field
try:
data_requirements = json.loads(instance.data_requirements)
# Ensure that the loaded object is a list
if not isinstance(data_requirements, list):
raise ValueError(
"Loaded object is not a list"
) # Include a message with the ValueError
except (ValueError, TypeError) as e:
logger.error(
f"Failed to parse data_requirements for submission {instance.submission_id}: {e}"
)
logger.error(f"With a value of {instance.data_requirements}")
data_requirements = (
[]
) # Default to an empty list if any error occurs

data["data_requirements"] = data_requirements
return data


class SubmissionPrivateReportSerializer(serializers.ModelSerializer):
    """Serialize the private report of the Submission model.

    Exposes the result/runtime/version fields declared in ``Meta.fields``
    and augments the payload with a nested ``created_by`` summary
    (uuid + username) in ``to_representation``.
    """

    class Meta:
        model = Submission
        fields = (
            "submission_id",
            "result",
            "mrt",
            "data_requirements",
            "submitted_at",
            "alt_name",
            "archived",
            "python_version",
            "worker_version",
        )

    def to_representation(self, instance):
        """Return the default representation plus creator identity.

        Args:
            instance: The ``Submission`` being serialized; its
                ``created_by`` relation must be set.

        Returns:
            dict: Serialized fields with an added ``created_by`` mapping.
        """
        # Python 3 zero-argument super() replaces the legacy
        # two-argument super(Class, self) form used elsewhere in this file.
        data = super().to_representation(instance)
        data["created_by"] = {
            "uuid": instance.created_by.uuid,
            "username": instance.created_by.username,
        }
        return data
17 changes: 16 additions & 1 deletion valhub/submissions/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,11 @@
from .models import Submission
from urllib.parse import urljoin

from .serializers import SubmissionSerializer, SubmissionDetailSerializer
from .serializers import (
SubmissionSerializer,
SubmissionDetailSerializer,
SubmissionPrivateReportSerializer,
)
from .models import Submission


Expand Down Expand Up @@ -273,6 +277,11 @@ def update_submission_result(request: Request, submission_id: str):
response_data = {"error": f"{field} is required"}
return Response(response_data, status=status.HTTP_400_BAD_REQUEST)

# Validate that function_parameters is a list
if not isinstance(results["function_parameters"], list):
response_data = {"error": "function_parameters must be a list"}
return Response(response_data, status=status.HTTP_400_BAD_REQUEST)

logging.info(f"results = {results}")
submission.mrt = float(results["mean_run_time"])
submission.data_requirements = results["function_parameters"]
Expand Down Expand Up @@ -375,6 +384,9 @@ def leaderboard_update(request: Request):
submission.mrt = float(mrt)

if data_requirements is not None:
if isinstance(data_requirements, str):
# Convert the string to a list of strings
data_requirements = [data_requirements]
submission.data_requirements = data_requirements

submission.save()
Expand Down Expand Up @@ -543,6 +555,9 @@ def get_submission_results(request: Request, submission_id: str):
# set returns
logging.info(f"setting returns")
ret["marimo_url"] = file_urls
ret["submission_details"] = SubmissionPrivateReportSerializer(
submission
).data

return JsonResponse(ret, status=status.HTTP_200_OK)

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Generated by Django 3.2.16 on 2024-07-30 17:50

from django.db import migrations, models


class Migration(migrations.Migration):
    """Alter ``SystemMetadata.dc_capacity`` to a non-null float with a
    default of ``0.0``.

    Auto-generated by Django 3.2.16.
    """

    dependencies = [
        ("system_metadata", "0001_initial"),
    ]

    operations = [
        # Give dc_capacity an explicit default so existing rows and new
        # inserts without a value receive 0.0.
        migrations.AlterField(
            model_name="systemmetadata",
            name="dc_capacity",
            field=models.FloatField(default=0.0),
        ),
    ]
31 changes: 8 additions & 23 deletions workers/src/pvinsight-validation-runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -564,7 +564,7 @@ def run( # noqa: C901
"data_requirements"
].iloc[0]

metrics_list = []
metrics_dict = {}

# Get the mean and median absolute errors
# when combining the metric and name for the public metrics dictionary,
Expand All @@ -582,22 +582,10 @@ def run( # noqa: C901

mean_metric = results_df[metric_name].mean()

public_metrics_dict["mean_" + metric] = mean_metric

metric_tuple = (
f"mean_{metric}",
mean_metric,
)
metrics_list.append(metric_tuple)
metrics_dict[f"mean_{metric}"] = mean_metric

median_metric = results_df[metric_name].median()
public_metrics_dict["median_" + metric] = median_metric

metric_tuple = (
f"median_{metric}",
median_metric,
)
metrics_list.append(metric_tuple)
metrics_dict[f"median_{metric}"] = median_metric
elif "runtime" in metric:
key = "run_time"

Expand All @@ -606,13 +594,10 @@ def run( # noqa: C901

mean_metric = results_df[key].mean()

metric_tuple = (
f"mean_{key}",
mean_metric,
)
metrics_list.append(metric_tuple)
metrics_dict[f"mean_{key}"] = mean_metric

public_metrics_dict["metrics"] = json.dumps(metrics_list)
# json dump no longer needed, as using json field in database
public_metrics_dict["metrics"] = metrics_dict

# Write public metric information to a public results table.
with open(
Expand Down Expand Up @@ -1184,8 +1169,8 @@ def generate_performance_metrics_for_submission(
results_dictionary["file_name"] = file_name
# Set the runtime in the results dictionary
results_dictionary["run_time"] = submission_runtime
# Set the data requirements in the dictionary, JSON required or bad juju happens in my DB and FE
results_dictionary["data_requirements"] = json.dumps(function_parameters)
# Set the data requirements in the dictionary, must be a list for DB array field
results_dictionary["data_requirements"] = function_parameters
# Loop through the rest of the performance metrics and calculate them
# (this predominantly applies to error metrics)
for metric in performance_metrics:
Expand Down
13 changes: 8 additions & 5 deletions workers/src/utility.py
Original file line number Diff line number Diff line change
Expand Up @@ -1044,11 +1044,14 @@ def create_docker_image(
buildargs={"zip_file": f"{submission_file_name}"},
)
for line in live_log_generator:
line_dict = json.loads(line)
if line_dict.get("stream"):
logger_if_able(
line_dict["stream"].rstrip(), logger, "INFO"
)
try:
line_dict = json.loads(line)
if line_dict.get("stream"):
logger_if_able(
line_dict["stream"].rstrip(), logger, "INFO"
)
except json.JSONDecodeError:
logger_if_able(line, logger, "INFO")

logger_if_able("Docker image created")

Expand Down

0 comments on commit 0bd115c

Please sign in to comment.