From 95cf6432bd53aeff01ceb9b83290f088ef034250 Mon Sep 17 00:00:00 2001
From: utkarshbhatthere
Date: Fri, 31 Mar 2023 12:02:23 +0530
Subject: [PATCH] Added test script for RGW S3 interface.

Signed-off-by: utkarshbhatthere
---
 .github/workflows/tests.yml | 21 ++++++---
 docs/conf.py                |  2 -
 scripts/appS3.py            | 93 +++++++++++++++++++++++++++++++++++++
 3 files changed, 107 insertions(+), 9 deletions(-)
 create mode 100644 scripts/appS3.py

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 9f456a5c..61e2c1f6 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -21,6 +21,11 @@ jobs:
           sudo iptables -F FORWARD || true
           sudo ip6tables -F FORWARD || true
 
+      - name: set up python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
       - name: Install dependencies
         run: |
           sudo snap refresh
@@ -28,6 +33,13 @@ jobs:
           sudo lxd init --auto
           sudo snap install snapcraft --classic
           snap list
+          # Python
+          sudo python -m pip install --upgrade pip
+          sudo pip install flake8 pep8-naming boto3
+
+      - name: Lint Check
+        run: |
+          flake8 . --count --show-source --statistics
 
       - name: Build snaps
         run: snapcraft
@@ -118,13 +130,8 @@
       - name: Exercise RGW
         run: |
           set -eux
-          sudo microceph.radosgw-admin user create --uid=test --display-name=test
-          sudo microceph.radosgw-admin key create --uid=test --key-type=s3 --access-key fooAccessKey --secret-key fooSecretKey
-          sudo apt-get -qq install s3cmd
-          echo hello-radosgw > ~/test.txt
-          s3cmd --host localhost --host-bucket="localhost/%(bucket)" --access_key=fooAccessKey --secret_key=fooSecretKey --no-ssl mb s3://testbucket
-          s3cmd --host localhost --host-bucket="localhost/%(bucket)" --access_key=fooAccessKey --secret_key=fooSecretKey --no-ssl put -P ~/test.txt s3://testbucket
-          curl -s http://localhost/testbucket/test.txt | grep -F hello-radosgw
+          sudo microceph.radosgw-admin user create --uid=test --display-name=test > keys.json
+          sudo python3 scripts/appS3.py http://localhost:80 keys.json --obj-num 2
 
       - name: Upload artifacts
         if: always()
diff --git a/docs/conf.py b/docs/conf.py
index 814e6612..6bce3bb7 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -19,8 +19,6 @@
 templates_path = ['_templates']
 exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'sphinxenv']
 
-
-
 # -- Options for HTML output -------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
 
diff --git a/scripts/appS3.py b/scripts/appS3.py
new file mode 100644
index 00000000..669b22de
--- /dev/null
+++ b/scripts/appS3.py
@@ -0,0 +1,93 @@
+import string
+import random
+import boto3
+import json
+import argparse
+
+
+def app_handle(args):
+    """Upload --obj-num random objects to RGW over S3 and print a summary."""
+    keys_path = args.keys
+    endpoint = args.endpoint
+
+    # Fetch Auth
+    with open(keys_path, 'r') as keys_file:
+        keys_dict = json.load(keys_file)
+
+    # Create Boto3 Client
+    keys = keys_dict["keys"][0]
+    client = boto3.resource("s3", verify=False,
+                            endpoint_url=endpoint,
+                            aws_access_key_id=keys["access_key"],
+                            aws_secret_access_key=keys["secret_key"])
+
+    # Perform IO
+    objects = []
+    bucket_name = "test-bucket"
+    client.Bucket(bucket_name).create()
+    for _ in range(args.obj_num):
+        object_name = "test-object"+rand_str(4)
+        data = rand_str(random.randint(10, 30)) * 1024 * 1024
+        primary_object_one = client.Object(
+            bucket_name,
+            object_name
+        )
+        primary_object_one.put(Body=data)
+        # Record object name and size (MB) for the summary below.
+        objects.append(
+            (object_name, primary_object_one.content_length/(1024*1024))
+        )
+
+    # Print Summary
+    print("IO Summary: Object Count {}".format(args.obj_num))
+    for obj, size in objects:
+        print("Object: {}/{} -> Size: {}MB".format(bucket_name, obj, size))
+
+    # Cleanup (if asked for)
+    if not args.no_delete:
+        print("Performing Cleanup")
+        for obj, _ in objects:
+            client.Object(bucket_name, obj).delete()
+
+
+def rand_str(length: int):
+    """Return a random string of uppercase letters and digits."""
+    return "".join(
+        random.choices(string.ascii_uppercase + string.digits, k=length)
+    )
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="An application which uses S3 for storage",
+        epilog="Ex: python3 appS3.py http://localhost:80 keys.json",
+    )
+
+    parser.add_argument(
+        "endpoint",
+        type=str,
+        help="Provide RGW endpoint to talk to.",
+    )
+    parser.add_argument(
+        "keys",
+        type=str,
+        help="Provide JSON file generated from Ceph RGW Admin.",
+    )
+    parser.add_argument(
+        "--obj-num",
+        type=int,
+        default=1,
+        help="Number of objects to upload to S3.",
+    )
+    parser.add_argument(
+        "--no-delete",
+        action="store_true",
+        help="Setting this to true would not cleanup the pushed objects.",
+    )
+    parser.set_defaults(func=app_handle)
+
+    # Parse the args.
+    args = parser.parse_args()
+
+    # Call the subcommand.
+    args.func(args)