From af612e9c6e7f80d7bfc321e0522d919a79101d02 Mon Sep 17 00:00:00 2001
From: Dave McKay
Date: Tue, 30 Jul 2024 09:17:44 +0100
Subject: [PATCH] limit option added to list_backup_csvs.py

---
 csd3-side/scripts/lsst-backup.py | 1 -
 scripts/list_backup_csvs.py      | 6 ++++--
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/csd3-side/scripts/lsst-backup.py b/csd3-side/scripts/lsst-backup.py
index 98ea561..067c0f8 100644
--- a/csd3-side/scripts/lsst-backup.py
+++ b/csd3-side/scripts/lsst-backup.py
@@ -26,7 +26,6 @@
 import base64
 import pandas as pd
 import numpy as np
-import glob
 import subprocess
 import yaml
 import io
diff --git a/scripts/list_backup_csvs.py b/scripts/list_backup_csvs.py
index 0cb800f..36fc84f 100644
--- a/scripts/list_backup_csvs.py
+++ b/scripts/list_backup_csvs.py
@@ -15,10 +15,12 @@
 parser.add_argument('--bucket_name', '-b', type=str, help='The name of the S3 bucket.')
 parser.add_argument('--download', action='store_true', default=False, help='Download the backup log.')
 parser.add_argument('--save-list', type=str, help='Write the list to file given absolute path.')
+parser.add_argument('--limit', type=int, help='Limit the number of objects to list.', default=1000)
 
 args = parser.parse_args()
 bucket_name = args.bucket_name
 download = args.download
+limit = args.limit
 
 if args.save_list:
     save_list = args.save_list
@@ -52,8 +54,8 @@
 total_size = 0
 
 # Download the backup log
-# Limited to 1000 objects - this is to prevent this script from hanging if there are a large number of objects in the bucket
-for ob in bucket.objects.filter(Prefix='butler').limit(1000):
+# Limited to 1000 objects by default - this is to prevent this script from hanging if there are a large number of objects in the bucket
+for ob in bucket.objects.filter(Prefix='butler').limit(limit):
     if ob.key.count('/') > 0:
         continue
     if log_suffix in ob.key or previous_log_suffix in ob.key:
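
Note (not part of the patch): a minimal sketch of how the new --limit option is expected to fit together in scripts/list_backup_csvs.py, assuming the script builds `bucket` from a boto3 S3 resource as the surrounding hunks suggest. Only the argparse and listing pieces touched by this patch are reproduced; the resource setup and the final print are illustrative stand-ins for the script's actual credential handling and log-matching logic.

import argparse
import boto3

parser = argparse.ArgumentParser()
parser.add_argument('--bucket_name', '-b', type=str, help='The name of the S3 bucket.')
parser.add_argument('--limit', type=int, help='Limit the number of objects to list.', default=1000)
args = parser.parse_args()
limit = args.limit

# Assumption: the real script configures credentials/endpoint elsewhere; a plain
# boto3 resource is used here only to make the sketch self-contained.
s3 = boto3.resource('s3')
bucket = s3.Bucket(args.bucket_name)

# .limit(limit) caps the listing at `limit` objects (1000 by default) so the
# script does not hang when the bucket holds a very large number of objects.
for ob in bucket.objects.filter(Prefix='butler').limit(limit):
    if ob.key.count('/') > 0:  # skip objects below the top level of the bucket
        continue
    print(ob.key, ob.size)  # stand-in for the script's log filtering and size accounting

Example invocation (hypothetical bucket name): python scripts/list_backup_csvs.py -b my-backup-bucket --limit 5000 raises the cap from the default 1000 to 5000 objects.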