Skip to content

Commit

Permalink
Merge pull request #2386 from teojgo/test/s3_rm_state
Browse files Browse the repository at this point in the history
[test] Use test dependencies instead of shared state for S3 tests
  • Loading branch information
Vasileios Karakasis authored Jan 19, 2022
2 parents 7840551 + 51e1b4f commit 3125b5b
Show file tree
Hide file tree
Showing 7 changed files with 37 additions and 111 deletions.
3 changes: 1 addition & 2 deletions cscs-checks/system/openstack/s3_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ class S3apiUploadLargeObject(S3apiCheck):

@run_after('init')
def set_deps_and_exec_opts(self):
    """Wire up test dependencies and the executable's arguments.

    The large-object upload must run after the small-object test has
    populated the bucket, so it depends on S3apiCreateSmallObject.
    (The pasted diff contained both the old 'S3apiCreateBucket' and the
    new dependency line; only the post-commit dependency is kept.)
    """
    self.depends_on('S3apiCreateSmallObject')
    self.executable_opts = ['s3_upload_large_object.py',
                            self.current_system.name,
                            self.username]
Expand Down Expand Up @@ -146,7 +146,6 @@ class S3apiDeleteBucketObject(S3apiCheck):

@run_after('init')
def set_deps_and_exec_opts(self):
self.depends_on('S3apiCreateSmallObject')
self.depends_on('S3apiDownloadLargeObject')
self.executable_opts = ['s3_delete.py',
self.current_system.name,
Expand Down
14 changes: 4 additions & 10 deletions cscs-checks/system/openstack/src/s3_create_bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,17 +16,11 @@
# Time the creation of `nbuckets` uniquely named test buckets and report
# the average per-bucket creation latency.  (Reconstructed post-commit
# code: the diff paste interleaved old %-format lines with the new
# f-string lines, and the removed shared-state tail is dropped.)
start = time.time()

for count in range(nbuckets):
    bkt_name = f'{system}_{username}_reframe_s3_bucket_{count}'
    print(f'Creating bucket {bkt_name}')
    conn.create_bucket(bkt_name)

end = time.time()

elapsed_secs = end - start
avg_creation_time = elapsed_secs / nbuckets
print(f'Average bucket creation time (s): {avg_creation_time}')
22 changes: 5 additions & 17 deletions cscs-checks/system/openstack/src/s3_create_small_object.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,32 +6,20 @@

# Create `nobjects` small objects in the first reframe test bucket and
# report the average per-object creation latency.
system = sys.argv[1]
username = sys.argv[2]
conn = tools.get_connection()

nobjects = 10

print('All buckets: ', conn.get_all_buckets())

bkt_name = f'{system}_{username}_reframe_s3_bucket_0'
bkt = conn.get_bucket(bkt_name)

start = time.time()

for count in range(nobjects):
    # BUG FIX: the committed line read `'obj_small_{count}'` without the
    # f prefix, so all ten iterations wrote the SAME key named literally
    # "obj_small_{count}".  The f prefix restores one object per count.
    obj_name = f'obj_small_{count}'
    print(f'Creating object {obj_name}')
    obj = bkt.new_key(obj_name)
    obj.set_contents_from_string('Test!')

end = time.time()

elapsed_secs = end - start
avg_creation_time = elapsed_secs / nobjects
print(f'Average object creation time (s): {avg_creation_time}')
9 changes: 2 additions & 7 deletions cscs-checks/system/openstack/src/s3_delete.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,11 @@

# Delete every reframe test bucket (and its objects) for this
# system/user pair and report the average per-item deletion latency.
system = sys.argv[1]
username = sys.argv[2]
conn = tools.get_connection()

start = time.time()
tools.delete_reframe_buckets(conn, system, username)
end = time.time()
nobjects = 30  # 10 buckets + 10 small + 10 large objects
elapsed_secs = end - start
avg_deletion_time = elapsed_secs / nobjects
print(f'Average deletion time (s): {avg_deletion_time}')
30 changes: 8 additions & 22 deletions cscs-checks/system/openstack/src/s3_download_large_object.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,39 +7,25 @@

# Download `nobjects` 1 GiB objects from the first reframe test bucket
# into a scratch file and report the average download rate in MiB/s.
system = sys.argv[1]
username = sys.argv[2]
conn = tools.get_connection()

nobjects = 10

print(conn.get_all_buckets())

bkt_name = f'{system}_{username}_reframe_s3_bucket_0'
bkt = conn.get_bucket(bkt_name)

print(f'Working in bucket: {bkt_name}')
print(f'Content of this bucket: {bkt.list()}')

# delete=False keeps the file on disk so boto can write to it by name.
test_file = tempfile.NamedTemporaryFile(dir='/tmp', delete=False)

start = time.time()

for count in range(nobjects):
    obj_name = f'obj_large_{count}'
    print(f'Downloading object {obj_name} from bucket {bkt.name} '
          f'to file {test_file.name}')
    obj = bkt.new_key(obj_name)
    obj.get_contents_to_filename(test_file.name)

end = time.time()

elapsed_secs = end - start
size_mb = 1024 * nobjects  # each object is 1024 MiB
avg_download_rate = size_mb / elapsed_secs
print(f'Average download rate (MiB/s): {avg_download_rate}')
29 changes: 8 additions & 21 deletions cscs-checks/system/openstack/src/s3_upload_large_object.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,38 +8,25 @@

# Upload `nobjects` copies of a 1 GiB scratch file into the first
# reframe test bucket and report the average upload rate in MiB/s.
system = sys.argv[1]
username = sys.argv[2]
conn = tools.get_connection()

nobjects = 10

bkt_name = f'{system}_{username}_reframe_s3_bucket_0'
bkt = conn.get_bucket(bkt_name)

# Build a 1 GiB zero-filled scratch file; delete=False keeps it on disk
# so dd and boto can reference it by name.
test_file = tempfile.NamedTemporaryFile(dir='/tmp', delete=False)
cmd = f'dd if=/dev/zero of={test_file.name} bs=1M count=1024'
p = subprocess.Popen(
    cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()

start = time.time()

for count in range(nobjects):
    obj_name = f'obj_large_{count}'
    print(f'Creating object {obj_name}')
    obj = bkt.new_key(obj_name)
    obj.set_contents_from_filename(test_file.name)

end = time.time()

elapsed_secs = end - start
size_mb = 1024 * nobjects  # each object is 1024 MiB
avg_upload_rate = size_mb / elapsed_secs
print(f'Average upload rate (MiB/s): {avg_upload_rate}')
41 changes: 9 additions & 32 deletions cscs-checks/system/openstack/src/tools.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,24 @@
import os
import re
import time
import boto.s3.connection


def get_s3_credentials():
    """Read the S3 key pair from ``$HOME/.reframe_openstack``.

    The file holds ``key=value`` lines; the ``s3_access_key`` and
    ``s3_secret_key`` entries are extracted.

    Returns:
        tuple: ``(access_key, secret_key)``; an entry missing from the
        file yields ``None`` in its place.

    Raises:
        KeyError: if ``HOME`` is not set.
        OSError: if the credentials file cannot be opened.
    """
    home = os.environ['HOME']
    credentials_file = f'{home}/.reframe_openstack'
    access_key = None
    secret_key = None
    # Fix: use a context manager so the file handle is always closed
    # (the original leaked it), and partition on the FIRST '=' so values
    # that themselves contain '=' are not truncated.
    with open(credentials_file, 'r') as f:
        for line in f:
            key, _, value = line.partition('=')
            if key == 's3_access_key':
                access_key = value.rstrip()
            elif key == 's3_secret_key':
                secret_key = value.rstrip()

    return (access_key, secret_key)


def get_connection():
(access_key, secret_key) = get_s3_credentials()
access_key, secret_key = get_s3_credentials()
conn = boto.connect_s3(
aws_access_key_id=access_key,
aws_secret_access_key=secret_key,
Expand All @@ -33,40 +32,18 @@ def get_connection():
def delete_reframe_buckets(conn, system, username):
    """Delete all reframe test buckets for this system/user pair.

    A bucket qualifies only if its name contains both *system* and
    *username* as substrings; every object inside a qualifying bucket
    is deleted before the bucket itself (S3 requires empty buckets).

    This is the post-commit reconstruction: the pasted diff carried both
    the old %-format and the new f-string print lines, plus the
    ``wait_for_state``/``set_state`` polling helpers that this commit
    removed in favour of ReFrame test dependencies — all dropped here.
    """
    print('Removing Reframe test buckets')
    buckets = conn.get_all_buckets()
    for bkt in buckets:
        if not re.search(system, bkt.name):
            continue
        if not re.search(username, bkt.name):
            continue
        for obj in bkt.list():
            print(f'Deleting object {bkt.name}/{obj.name}')
            obj.delete()
        print(f'Deleting bucket {bkt.name}')
        bkt.delete()

0 comments on commit 3125b5b

Please sign in to comment.