limit option added to list_backup_csvs.py
davedavemckay committed Jul 30, 2024
1 parent 7c69715 commit af612e9
Showing 2 changed files with 4 additions and 3 deletions.
1 change: 0 additions & 1 deletion csd3-side/scripts/lsst-backup.py
@@ -26,7 +26,6 @@
 import base64
 import pandas as pd
 import numpy as np
-import glob
 import subprocess
 import yaml
 import io
6 changes: 4 additions & 2 deletions scripts/list_backup_csvs.py
@@ -15,10 +15,12 @@
 parser.add_argument('--bucket_name', '-b', type=str, help='The name of the S3 bucket.')
 parser.add_argument('--download', action='store_true', default=False, help='Download the backup log.')
 parser.add_argument('--save-list', type=str, help='Write the list to file given absolute path.')
+parser.add_argument('--limit', type=int, help='Limit the number of objects to list.', default=1000)
 args = parser.parse_args()
 
 bucket_name = args.bucket_name
 download = args.download
+limit = args.limit
 
 if args.save_list:
     save_list = args.save_list
@@ -52,8 +54,8 @@
 total_size = 0
 
 # Download the backup log
-# Limited to 1000 objects - this is to prevent this script from hanging if there are a large number of objects in the bucket
-for ob in bucket.objects.filter(Prefix='butler').limit(1000):
+# Limited to 1000 objects by default - this is to prevent this script from hanging if there are a large number of objects in the bucket
+for ob in bucket.objects.filter(Prefix='butler').limit(limit):
     if ob.key.count('/') > 0:
         continue
     if log_suffix in ob.key or previous_log_suffix in ob.key:
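In effect, the change threads a new --limit command-line argument through to boto3's object-collection limit, replacing the previously hard-coded cap of 1000. A minimal standalone sketch of the resulting behaviour is below; the S3 resource setup and the print at the end are placeholders for illustration, not the script's actual credential handling or output logic.

import argparse
import boto3

parser = argparse.ArgumentParser()
parser.add_argument('--bucket_name', '-b', type=str, help='The name of the S3 bucket.')
parser.add_argument('--limit', type=int, default=1000, help='Limit the number of objects to list.')
args = parser.parse_args()

# Placeholder: the real script configures its S3 resource elsewhere
# (endpoint URL and credentials not shown here).
s3 = boto3.resource('s3')
bucket = s3.Bucket(args.bucket_name)

# .limit() caps how many ObjectSummary items the filtered collection yields,
# which is what keeps the loop from hanging on buckets with very many objects.
for ob in bucket.objects.filter(Prefix='butler').limit(args.limit):
    if ob.key.count('/') > 0:  # top-level keys only, as in the script
        continue
    print(ob.key, ob.size)  # placeholder for the script's log/CSV handling

Run as, for example, python list_backup_csvs.py -b my-bucket --limit 500 (bucket name hypothetical) to stop after at most 500 objects instead of the previous fixed 1000.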
