Commit

address comments
sastels committed Mar 12, 2024
1 parent 8e43d99 commit 177995f
Showing 2 changed files with 2 additions and 11 deletions.
app/aws/s3.py: 10 changes (1 addition & 9 deletions)
Expand Up @@ -60,18 +60,10 @@ def get_job_metadata_from_s3(service_id, job_id):
return obj.get()["Metadata"]


# adapted from https://docs.aws.amazon.com/AmazonS3/latest/userguide/example_s3_DeleteObjects_section.html
def remove_job_batch_from_s3(jobs):
bucket = resource("s3").Bucket(current_app.config["CSV_UPLOAD_BUCKET_NAME"])
object_keys = [FILE_LOCATION_STRUCTURE.format(job.service_id, job.id) for job in jobs]

try:
response = bucket.delete_objects(Delete={"Objects": [{"Key": key} for key in object_keys]})
except botocore.exceptions.ClientError:
current_app.logger.exception("Couldn't delete any objects from bucket %s.", bucket.name)
raise
else:
return response
bucket.delete_objects(Delete={"Objects": [{"Key": key} for key in object_keys]})


def get_s3_bucket_objects(bucket_name, subfolder="", older_than=7, limit_days=2):
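Note on the s3.py change: with the try/except removed, a botocore.exceptions.ClientError raised by delete_objects now propagates out of remove_job_batch_from_s3 instead of being logged inside the helper. A minimal sketch of how a caller might handle that, assuming an import path matching the file layout shown in this diff; the wrapper name is hypothetical:

import botocore.exceptions
from flask import current_app

from app.aws import s3


def remove_job_csvs(jobs):
    # Hypothetical wrapper: log and re-raise if the batched S3 delete fails,
    # since remove_job_batch_from_s3 no longer catches ClientError itself.
    try:
        s3.remove_job_batch_from_s3(jobs)
    except botocore.exceptions.ClientError:
        current_app.logger.exception("Couldn't delete job CSVs from S3.")
        raise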
app/celery/nightly_tasks.py: 3 changes (1 addition & 2 deletions)
Expand Up @@ -59,8 +59,7 @@ def _remove_csv_files(job_types):
current_app.logger.info("Archiving {} jobs.".format(len(jobs)))
s3.remove_job_batch_from_s3(jobs)
dao_archive_job_batch(jobs)
for job in jobs:
current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))
current_app.logger.info(f"Jobs archived: {[job.id for job in jobs]}")


@notify_celery.task(name="delete-sms-notifications")
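Note on the nightly_tasks.py change: the per-job log loop is replaced by one summary line that interpolates the whole list of job IDs. A standalone illustration of what that f-string produces, using hypothetical UUID job IDs:

import uuid
from dataclasses import dataclass


@dataclass
class Job:
    id: uuid.UUID


jobs = [Job(uuid.uuid4()), Job(uuid.uuid4())]
# Prints something like: Jobs archived: [UUID('...'), UUID('...')]
print(f"Jobs archived: {[job.id for job in jobs]}")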
