Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Ensures that the time tracking report can be successfully uploaded to… #2220

Merged
merged 1 commit into from
Dec 20, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 16 additions & 10 deletions src/palace/manager/scripts/playtime_entries.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,12 @@
import argparse
import csv
import os
import tempfile
import uuid
from collections import defaultdict
from collections.abc import Iterable
from datetime import datetime, timedelta
from tempfile import TemporaryFile
from pathlib import Path
from typing import TYPE_CHECKING, Any, Protocol

import dateutil.parser
Expand Down Expand Up @@ -191,7 +192,7 @@ def do_run(self):
)

# Write to a temporary file so we don't overflow the memory
with TemporaryFile(
with tempfile.NamedTemporaryFile(
"w+",
prefix=f"playtimereport{formatted_until_date}",
suffix=link_extension,
Expand All @@ -204,8 +205,6 @@ def do_run(self):
records=self._fetch_report_records(start=start, until=until),
)

# Rewind the file and send the report email
temp.seek(0)
recipient = os.environ.get(
Configuration.REPORTING_EMAIL_ENVIRONMENT_VARIABLE
)
Expand All @@ -217,12 +216,18 @@ def do_run(self):
f"{linked_file_name}"
)

s3_service = self.services.storage.public()
s3_service.store_stream(
key,
temp,
content_type="text/csv",
)
                # The only way I could get S3 to accept the stream was by
                # reopening it as a binary stream: otherwise it was failing on
                # a "Strings must be encoded before hashing" error from S3.
with Path(temp.name).open(
"rb",
) as binary_stream:
s3_service = self.services.storage.public()
s3_service.store_stream(
key,
binary_stream,
content_type="text/csv",
)

s3_file_link = s3_service.generate_url(key)

Expand All @@ -236,6 +241,7 @@ def do_run(self):
)
else:
self.log.error("No reporting email found, logging complete report.")
temp.seek(0)
self.log.warning(temp.read())

def _fetch_report_records(self, start: datetime, until: datetime) -> Query:
Expand Down
Loading