diff --git a/.github/workflows/system.yml b/.github/workflows/system.yml index 3694d499..481a6eb5 100644 --- a/.github/workflows/system.yml +++ b/.github/workflows/system.yml @@ -8,17 +8,17 @@ jobs: fail-fast: false matrix: include: - #- set: 1 - # LOCAL_FOLDER: /tmp/gw1 - # BUCKET_ONE_NAME: versity-gwtest-bucket-one-1 - # BUCKET_TWO_NAME: versity-gwtest-bucket-two-1 - # IAM_TYPE: folder - # USERS_FOLDER: /tmp/iam1 - # AWS_ENDPOINT_URL: https://127.0.0.1:7070 - # RUN_SET: "s3cmd" - # RECREATE_BUCKETS: "true" - # PORT: 7070 - # BACKEND: "posix" + - set: 1 + LOCAL_FOLDER: /tmp/gw1 + BUCKET_ONE_NAME: versity-gwtest-bucket-one-1 + BUCKET_TWO_NAME: versity-gwtest-bucket-two-1 + IAM_TYPE: folder + USERS_FOLDER: /tmp/iam1 + AWS_ENDPOINT_URL: https://127.0.0.1:7070 + RUN_SET: "s3cmd" + RECREATE_BUCKETS: "true" + PORT: 7070 + BACKEND: "posix" - set: 2 LOCAL_FOLDER: /tmp/gw2 BUCKET_ONE_NAME: versity-gwtest-bucket-one-2 diff --git a/tests/commands/abort_multipart_upload.sh b/tests/commands/abort_multipart_upload.sh index dde8a4bb..2f9ff632 100644 --- a/tests/commands/abort_multipart_upload.sh +++ b/tests/commands/abort_multipart_upload.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash abort_multipart_upload() { + record_command "abort-multipart-upload" "client:s3api" if [ $# -ne 3 ]; then log 2 "'abort multipart upload' command requires bucket, key, upload ID" return 1 @@ -13,6 +14,7 @@ abort_multipart_upload() { } abort_multipart_upload_with_user() { + record_command "abort-multipart-upload" "client:s3api" if [ $# -ne 5 ]; then log 2 "'abort multipart upload' command requires bucket, key, upload ID, username, password" return 1 diff --git a/tests/commands/complete_multipart_upload.sh b/tests/commands/complete_multipart_upload.sh index 0d35501d..21a6e51f 100644 --- a/tests/commands/complete_multipart_upload.sh +++ b/tests/commands/complete_multipart_upload.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash complete_multipart_upload() { + record_command "complete-multipart-upload" "client:s3api" if [[ $# -ne 4 ]]; then log 2 "'complete multipart upload' command requires bucket, key, upload ID, parts list" return 1 diff --git a/tests/commands/copy_object.sh b/tests/commands/copy_object.sh index b7cace2f..f52293a5 100644 --- a/tests/commands/copy_object.sh +++ b/tests/commands/copy_object.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash copy_object() { + record_command "copy-object" "client:$1" if [ $# -ne 4 ]; then echo "copy object command requires command type, source, bucket, key" return 1 @@ -29,6 +30,7 @@ copy_object() { } copy_object_empty() { + record_command "copy-object" "client:s3api" error=$(aws --no-verify-ssl s3api copy-object 2>&1) || local result=$?
if [[ $result -eq 0 ]]; then log 2 "copy object with empty parameters returned no error" diff --git a/tests/commands/create_bucket.sh b/tests/commands/create_bucket.sh index 2ba87bf1..b4b3c236 100644 --- a/tests/commands/create_bucket.sh +++ b/tests/commands/create_bucket.sh @@ -1,9 +1,12 @@ #!/usr/bin/env bash +source ./tests/report.sh + # create an AWS bucket # param: bucket name # return 0 for success, 1 for failure create_bucket() { + record_command "create-bucket" "client:$1" if [ $# -ne 2 ]; then log 2 "create bucket missing command type, bucket name" return 1 @@ -33,6 +36,7 @@ create_bucket() { } create_bucket_object_lock_enabled() { + record_command "create-bucket" "client:s3api" if [ $# -ne 1 ]; then log 2 "create bucket missing bucket name" return 1 diff --git a/tests/commands/create_multipart_upload.sh b/tests/commands/create_multipart_upload.sh index 4e1a2759..f1aede4d 100644 --- a/tests/commands/create_multipart_upload.sh +++ b/tests/commands/create_multipart_upload.sh @@ -4,6 +4,7 @@ # params: bucket, key # return 0 for success, 1 for failure create_multipart_upload() { + record_command "create-multipart-upload" "client:s3api" if [ $# -ne 2 ]; then log 2 "create multipart upload function must have bucket, key" return 1 @@ -24,6 +25,7 @@ create_multipart_upload() { } create_multipart_upload_with_user() { + record_command "create-multipart-upload" "client:s3api" if [ $# -ne 4 ]; then log 2 "create multipart upload function must have bucket, key, username, password" return 1 @@ -44,6 +46,7 @@ create_multipart_upload_with_user() { } create_multipart_upload_params() { + record_command "create-multipart-upload" "client:s3api" if [ $# -ne 8 ]; then log 2 "create multipart upload function with params must have bucket, key, content type, metadata, object lock legal hold status, " \ "object lock mode, object lock retain until date, and tagging" @@ -71,6 +74,7 @@ create_multipart_upload_params() { } create_multipart_upload_custom() { + record_command "create-multipart-upload" "client:s3api" if [ $# -lt 2 ]; then log 2 "create multipart upload custom function must have at least bucket and key" return 1 diff --git a/tests/commands/delete_bucket.sh b/tests/commands/delete_bucket.sh index 4b883b2a..4d0c974d 100644 --- a/tests/commands/delete_bucket.sh +++ b/tests/commands/delete_bucket.sh @@ -4,11 +4,17 @@ # param: bucket name # return 0 for success, 1 for failure delete_bucket() { + record_command "delete-bucket" "client:$1" if [ $# -ne 2 ]; then log 2 "delete bucket missing command type, bucket name" return 1 fi + if [[ ( $RECREATE_BUCKETS == "false" ) && (( "$2" == "$BUCKET_ONE_NAME" ) || ( "$2" == "$BUCKET_TWO_NAME" )) ]]; then + log 2 "attempt to delete main buckets in static mode" + return 1 + fi + local exit_code=0 local error if [[ $1 == 's3' ]]; then diff --git a/tests/commands/delete_bucket_policy.sh b/tests/commands/delete_bucket_policy.sh index f311560a..93ea6878 100644 --- a/tests/commands/delete_bucket_policy.sh +++ b/tests/commands/delete_bucket_policy.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash delete_bucket_policy() { + record_command "delete-bucket-policy" "client:$1" if [[ $# -ne 2 ]]; then log 2 "delete bucket policy command requires command type, bucket" return 1 @@ -23,6 +24,7 @@ delete_bucket_policy() { } delete_bucket_policy_with_user() { + record_command "delete-bucket-policy" "client:s3api" if [[ $# -ne 3 ]]; then log 2 "'delete bucket policy with user' command requires bucket, username, password" return 1 diff --git a/tests/commands/delete_bucket_tagging.sh 
b/tests/commands/delete_bucket_tagging.sh new file mode 100644 index 00000000..a7f3781a --- /dev/null +++ b/tests/commands/delete_bucket_tagging.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +delete_bucket_tagging() { + record_command "delete-bucket-tagging" "client:$1" + if [ $# -ne 2 ]; then + log 2 "delete bucket tagging command missing command type, bucket name" + return 1 + fi + local result + if [[ $1 == 'aws' ]]; then + tags=$(aws --no-verify-ssl s3api delete-bucket-tagging --bucket "$2" 2>&1) || result=$? + elif [[ $1 == 'mc' ]]; then + tags=$(mc --insecure tag remove "$MC_ALIAS"/"$2" 2>&1) || result=$? + else + log 2 "invalid command type $1" + return 1 + fi + if [[ $result -ne 0 ]]; then + log 2 "error deleting bucket tagging: $tags" + return 1 + fi + return 0 +} diff --git a/tests/commands/delete_object.sh b/tests/commands/delete_object.sh index 25277a32..a4d94656 100644 --- a/tests/commands/delete_object.sh +++ b/tests/commands/delete_object.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash delete_object() { + record_command "delete-object" "client:$1" if [ $# -ne 3 ]; then log 2 "delete object command requires command type, bucket, key" return 1 @@ -28,6 +29,7 @@ delete_object() { } delete_object_with_user() { + record_command "delete-object" "client:$1" if [ $# -ne 5 ]; then log 2 "delete object with user command requires command type, bucket, key, access ID, secret key" return 1 diff --git a/tests/commands/delete_object_tagging.sh b/tests/commands/delete_object_tagging.sh index bf59a382..1c4a23f1 100644 --- a/tests/commands/delete_object_tagging.sh +++ b/tests/commands/delete_object_tagging.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash delete_object_tagging() { + record_command "delete-object-tagging" "client:$1" if [[ $# -ne 3 ]]; then echo "delete object tagging command missing command type, bucket, key" return 1 diff --git a/tests/commands/delete_objects.sh b/tests/commands/delete_objects.sh new file mode 100644 index 00000000..cb9ad522 --- /dev/null +++ b/tests/commands/delete_objects.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +delete_objects() { + record_command "delete-objects" "client:s3api" + if [[ $# -ne 3 ]]; then + log 2 "'delete-objects' command requires bucket name, two object keys" + return 1 + fi + if ! 
error=$(aws --no-verify-ssl s3api delete-objects --bucket "$1" --delete "{ + \"Objects\": [ + {\"Key\": \"$2\"}, + {\"Key\": \"$3\"} + ] + }" 2>&1); then + log 2 "error deleting objects: $error" + return 1 + fi + return 0 +} \ No newline at end of file diff --git a/tests/commands/get_bucket_acl.sh b/tests/commands/get_bucket_acl.sh index b6b52513..69ab489a 100644 --- a/tests/commands/get_bucket_acl.sh +++ b/tests/commands/get_bucket_acl.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_bucket_acl() { + record_command "get-bucket-acl" "client:$1" if [ $# -ne 2 ]; then log 2 "bucket ACL command missing command type, bucket name" return 1 @@ -22,6 +23,7 @@ get_bucket_acl() { } get_bucket_acl_with_user() { + record_command "get-bucket-acl" "client:s3api" if [ $# -ne 3 ]; then log 2 "'get bucket ACL with user' command requires bucket name, username, password" return 1 diff --git a/tests/commands/get_bucket_location.sh b/tests/commands/get_bucket_location.sh index b2dff87f..4470fc41 100644 --- a/tests/commands/get_bucket_location.sh +++ b/tests/commands/get_bucket_location.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_bucket_location() { + record_command "get-bucket-location" "client:$1" if [[ $# -ne 2 ]]; then echo "get bucket location command requires command type, bucket name" return 1 @@ -23,6 +24,7 @@ get_bucket_location() { } get_bucket_location_aws() { + record_command "get-bucket-location" "client:s3api" if [[ $# -ne 1 ]]; then echo "get bucket location (aws) requires bucket name" return 1 @@ -38,6 +40,7 @@ get_bucket_location_aws() { } get_bucket_location_s3cmd() { + record_command "get-bucket-location" "client:s3cmd" if [[ $# -ne 1 ]]; then echo "get bucket location (s3cmd) requires bucket name" return 1 @@ -53,6 +56,7 @@ get_bucket_location_s3cmd() { } get_bucket_location_mc() { + record_command "get-bucket-location" "client:mc" if [[ $# -ne 1 ]]; then echo "get bucket location (mc) requires bucket name" return 1 diff --git a/tests/commands/get_bucket_ownership_controls.sh b/tests/commands/get_bucket_ownership_controls.sh index 3c9ca0d5..b02f51c9 100644 --- a/tests/commands/get_bucket_ownership_controls.sh +++ b/tests/commands/get_bucket_ownership_controls.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_bucket_ownership_controls() { + record_command "get-bucket-ownership-controls" "client:s3api" if [[ $# -ne 1 ]]; then log 2 "'get bucket ownership controls' command requires bucket name" return 1 diff --git a/tests/commands/get_bucket_policy.sh b/tests/commands/get_bucket_policy.sh index 574fe2b6..c3562edf 100644 --- a/tests/commands/get_bucket_policy.sh +++ b/tests/commands/get_bucket_policy.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_bucket_policy() { + record_command "get-bucket-policy" "client:$1" if [[ $# -ne 2 ]]; then log 2 "get bucket policy command requires command type, bucket" return 1 @@ -25,6 +26,7 @@ get_bucket_policy() { } get_bucket_policy_aws() { + record_command "get-bucket-policy" "client:s3api" if [[ $# -ne 1 ]]; then log 2 "aws 'get bucket policy' command requires bucket" return 1 @@ -47,6 +49,7 @@ get_bucket_policy_aws() { } get_bucket_policy_with_user() { + record_command "get-bucket-policy" "client:s3api" if [[ $# -ne 3 ]]; then log 2 "'get bucket policy with user' command requires bucket, username, password" return 1 @@ -67,6 +70,7 @@ get_bucket_policy_with_user() { } get_bucket_policy_s3cmd() { + record_command "get-bucket-policy" "client:s3cmd" if [[ $# -ne 1 ]]; then log 2 "s3cmd 'get bucket policy' command requires bucket" return 1 @@ -110,6 +114,7 @@ 
get_bucket_policy_s3cmd() { } get_bucket_policy_mc() { + record_command "get-bucket-policy" "client:mc" if [[ $# -ne 1 ]]; then echo "aws 'get bucket policy' command requires bucket" return 1 diff --git a/tests/commands/get_bucket_tagging.sh b/tests/commands/get_bucket_tagging.sh index 1ac5fe9c..5a24e538 100644 --- a/tests/commands/get_bucket_tagging.sh +++ b/tests/commands/get_bucket_tagging.sh @@ -4,6 +4,7 @@ # params: bucket # export 'tags' on success, return 1 for error get_bucket_tagging() { + record_command "get-bucket-tagging" "client:$1" if [ $# -ne 2 ]; then echo "get bucket tag command missing command type, bucket name" return 1 diff --git a/tests/commands/get_bucket_versioning.sh b/tests/commands/get_bucket_versioning.sh index 4622876f..c3f9c804 100644 --- a/tests/commands/get_bucket_versioning.sh +++ b/tests/commands/get_bucket_versioning.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_bucket_versioning() { + record_command "get-bucket-versioning" "client:s3api" if [[ $# -ne 2 ]]; then log 2 "put bucket versioning command requires command type, bucket name" return 1 diff --git a/tests/commands/get_object.sh b/tests/commands/get_object.sh index e509b2f2..22c95303 100644 --- a/tests/commands/get_object.sh +++ b/tests/commands/get_object.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_object() { + record_command "get-object" "client:$1" if [ $# -ne 4 ]; then log 2 "get object command requires command type, bucket, key, destination" return 1 @@ -8,26 +9,28 @@ get_object() { local exit_code=0 local error if [[ $1 == 's3' ]]; then - error=$(aws --no-verify-ssl s3 mv "s3://$2/$3" "$4" 2>&1) || exit_code=$? + get_object_error=$(aws --no-verify-ssl s3 mv "s3://$2/$3" "$4" 2>&1) || exit_code=$? elif [[ $1 == 's3api' ]] || [[ $1 == 'aws' ]]; then - error=$(aws --no-verify-ssl s3api get-object --bucket "$2" --key "$3" "$4" 2>&1) || exit_code=$? + get_object_error=$(aws --no-verify-ssl s3api get-object --bucket "$2" --key "$3" "$4" 2>&1) || exit_code=$? elif [[ $1 == 's3cmd' ]]; then - error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate get "s3://$2/$3" "$4" 2>&1) || exit_code=$? + get_object_error=$(s3cmd "${S3CMD_OPTS[@]}" --no-check-certificate get "s3://$2/$3" "$4" 2>&1) || exit_code=$? elif [[ $1 == 'mc' ]]; then - error=$(mc --insecure get "$MC_ALIAS/$2/$3" "$4" 2>&1) || exit_code=$? + get_object_error=$(mc --insecure get "$MC_ALIAS/$2/$3" "$4" 2>&1) || exit_code=$? 
else log 2 "'get object' command not implemented for '$1'" return 1 fi log 5 "get object exit code: $exit_code" if [ $exit_code -ne 0 ]; then - log 2 "error getting object: $error" + log 2 "error getting object: $get_object_error" + export get_object_error return 1 fi return 0 } get_object_with_range() { + record_command "get-object" "client:s3api" if [[ $# -ne 4 ]]; then log 2 "'get object with range' requires bucket, key, range, outfile" return 1 @@ -41,6 +44,7 @@ get_object_with_range() { } get_object_with_user() { + record_command "get-object" "client:$1" if [ $# -ne 6 ]; then log 2 "'get object with user' command requires command type, bucket, key, save location, aws ID, aws secret key" return 1 diff --git a/tests/commands/get_object_attributes.sh b/tests/commands/get_object_attributes.sh index fb092983..a81b822e 100644 --- a/tests/commands/get_object_attributes.sh +++ b/tests/commands/get_object_attributes.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_object_attributes() { + record_command "get-object-attributes" "client:s3api" if [[ $# -ne 2 ]]; then log 2 "'get object attributes' command requires bucket, key" return 1 diff --git a/tests/commands/get_object_legal_hold.sh b/tests/commands/get_object_legal_hold.sh index 418276e6..07b143ab 100644 --- a/tests/commands/get_object_legal_hold.sh +++ b/tests/commands/get_object_legal_hold.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_object_legal_hold() { + record_command "get-object-legal-hold" "client:s3api" if [[ $# -ne 2 ]]; then log 2 "'get object legal hold' command requires bucket, key" return 1 diff --git a/tests/commands/get_object_lock_configuration.sh b/tests/commands/get_object_lock_configuration.sh index f127fc2e..ed7a2343 100644 --- a/tests/commands/get_object_lock_configuration.sh +++ b/tests/commands/get_object_lock_configuration.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_object_lock_configuration() { + record_command "get-object-lock-configuration" "client:s3api" if [[ $# -ne 1 ]]; then log 2 "'get object lock configuration' command missing bucket name" return 1 diff --git a/tests/commands/get_object_retention.sh b/tests/commands/get_object_retention.sh index 7bc0fd4b..ec41ced6 100644 --- a/tests/commands/get_object_retention.sh +++ b/tests/commands/get_object_retention.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_object_retention() { + record_command "get-object-retention" "client:s3api" if [[ $# -ne 2 ]]; then log 2 "'get object retention' command requires bucket, key" return 1 diff --git a/tests/commands/get_object_tagging.sh b/tests/commands/get_object_tagging.sh index 14b20c1c..613858ad 100644 --- a/tests/commands/get_object_tagging.sh +++ b/tests/commands/get_object_tagging.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash get_object_tagging() { + record_command "get-object-tagging" "client:$1" if [ $# -ne 3 ]; then log 2 "get object tag command missing command type, bucket, and/or key" return 1 diff --git a/tests/commands/head_bucket.sh b/tests/commands/head_bucket.sh index 938d63d4..235bb6ec 100644 --- a/tests/commands/head_bucket.sh +++ b/tests/commands/head_bucket.sh @@ -1,6 +1,9 @@ #!/usr/bin/env bash +source ./tests/report.sh + head_bucket() { + record_command "head-bucket" "client:$1" if [ $# -ne 2 ]; then echo "head bucket command missing command type, bucket name" return 1 diff --git a/tests/commands/head_object.sh b/tests/commands/head_object.sh index d832b1da..cdabd91a 100644 --- a/tests/commands/head_object.sh +++ b/tests/commands/head_object.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash head_object() { + record_command 
"head-object" "client:$1" if [ $# -ne 3 ]; then log 2 "head-object missing command, bucket name, object name" return 2 diff --git a/tests/commands/list_buckets.sh b/tests/commands/list_buckets.sh index 9e2340ee..a6cb249c 100644 --- a/tests/commands/list_buckets.sh +++ b/tests/commands/list_buckets.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash list_buckets() { + record_command "list-buckets" "client:$1" if [ $# -ne 1 ]; then echo "list buckets command missing command type" return 1 diff --git a/tests/commands/list_multipart_uploads.sh b/tests/commands/list_multipart_uploads.sh index 4a6f96ea..fa8ac0d6 100644 --- a/tests/commands/list_multipart_uploads.sh +++ b/tests/commands/list_multipart_uploads.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash list_multipart_uploads() { + record_command "list-multipart-uploads" "client:s3api" if [[ $# -ne 1 ]]; then log 2 "'list multipart uploads' command requires bucket name" return 1 @@ -13,6 +14,7 @@ list_multipart_uploads() { } list_multipart_uploads_with_user() { + record_command "list-multipart-uploads" "client:s3api" if [[ $# -ne 3 ]]; then log 2 "'list multipart uploads' command requires bucket name, username, password" return 1 diff --git a/tests/commands/list_object_versions.sh b/tests/commands/list_object_versions.sh index da795657..d0d8b387 100644 --- a/tests/commands/list_object_versions.sh +++ b/tests/commands/list_object_versions.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash list_object_versions() { + record_command "list-object-versions" "client:s3api" if [[ $# -ne 1 ]]; then log 2 "'list object versions' command requires bucket name" return 1 diff --git a/tests/commands/list_objects.sh b/tests/commands/list_objects.sh index 479138ec..e0ab3dba 100644 --- a/tests/commands/list_objects.sh +++ b/tests/commands/list_objects.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash list_objects() { + record_command "list-objects" "client:$1" if [ $# -ne 2 ]; then echo "list objects command requires command type, and bucket or folder" return 1 diff --git a/tests/commands/put_bucket_acl.sh b/tests/commands/put_bucket_acl.sh index 2340f3ba..3f6cd933 100644 --- a/tests/commands/put_bucket_acl.sh +++ b/tests/commands/put_bucket_acl.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash put_bucket_acl() { + record_command "put-bucket-acl" "client:$1" if [[ $# -ne 3 ]]; then log 2 "put bucket acl command requires command type, bucket name, acls or username" return 1 @@ -24,11 +25,12 @@ put_bucket_acl() { } put_bucket_canned_acl() { + record_command "put-bucket-acl" "client:s3api" if [[ $# -ne 2 ]]; then log 2 "'put bucket canned acl' command requires bucket name, canned ACL" return 1 fi - if ! error=$(aws --no-verify-ssl s3api put-bucket-acl --bucket "$1" --acl "$2"); then + if ! error=$(aws --no-verify-ssl s3api put-bucket-acl --bucket "$1" --acl "$2" 2>&1); then log 2 "error re-setting bucket acls: $error" return 1 fi @@ -36,11 +38,12 @@ put_bucket_canned_acl() { } put_bucket_canned_acl_with_user() { + record_command "put-bucket-acl" "client:s3api" if [[ $# -ne 2 ]]; then log 2 "'put bucket canned acl with user' command requires bucket name, canned ACL, username, password" return 1 fi - if ! error=$(AWS_ACCESS_KEY_ID="$3" AWS_SECRET_ACCESS_KEY="$4" aws --no-verify-ssl s3api put-bucket-acl --bucket "$1" --acl "$2"); then + if ! 
error=$(AWS_ACCESS_KEY_ID="$3" AWS_SECRET_ACCESS_KEY="$4" aws --no-verify-ssl s3api put-bucket-acl --bucket "$1" --acl "$2" 2>&1); then log 2 "error re-setting bucket acls: $error" return 1 fi diff --git a/tests/commands/put_bucket_ownership_controls.sh b/tests/commands/put_bucket_ownership_controls.sh index b9628c10..8b4f0c67 100644 --- a/tests/commands/put_bucket_ownership_controls.sh +++ b/tests/commands/put_bucket_ownership_controls.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash put_bucket_ownership_controls() { + record_command "put-bucket-ownership-controls" "client:s3api" if [[ $# -ne 2 ]]; then log 2 "'put bucket ownership controls' command requires bucket name, control" return 1 diff --git a/tests/commands/put_bucket_policy.sh b/tests/commands/put_bucket_policy.sh index d9a97d8a..a8c77ad0 100644 --- a/tests/commands/put_bucket_policy.sh +++ b/tests/commands/put_bucket_policy.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash put_bucket_policy() { + record_command "put-bucket-policy" "client:$1" if [[ $# -ne 3 ]]; then log 2 "'put bucket policy' command requires command type, bucket, policy file" return 1 @@ -26,6 +27,7 @@ put_bucket_policy() { } put_bucket_policy_with_user() { + record_command "put-bucket-policy" "client:s3api" if [[ $# -ne 4 ]]; then log 2 "'put bucket policy with user' command requires bucket, policy file, username, password" return 1 diff --git a/tests/commands/put_bucket_versioning.sh b/tests/commands/put_bucket_versioning.sh index b6352ae5..377fb51b 100644 --- a/tests/commands/put_bucket_versioning.sh +++ b/tests/commands/put_bucket_versioning.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash put_bucket_versioning() { + record_command "put-bucket-versioning" "client:s3api" if [[ $# -ne 3 ]]; then log 2 "put bucket versioning command requires command type, bucket name, 'Enabled' or 'Suspended'" return 1 diff --git a/tests/commands/put_object.sh b/tests/commands/put_object.sh index 1c1a622b..ffa34acf 100644 --- a/tests/commands/put_object.sh +++ b/tests/commands/put_object.sh @@ -1,6 +1,9 @@ #!/usr/bin/env bash +source ./tests/report.sh + put_object() { + record_command "put-object" "client:$1" if [ $# -ne 4 ]; then log 2 "put object command requires command type, source, destination bucket, destination key" return 1 @@ -28,6 +31,7 @@ put_object() { } put_object_with_user() { + record_command "put-object" "client:$1" if [ $# -ne 6 ]; then log 2 "put object command requires command type, source, destination bucket, destination key, aws ID, aws secret key" return 1 diff --git a/tests/commands/put_object_legal_hold.sh b/tests/commands/put_object_legal_hold.sh index a9fa3e31..dda27bf9 100644 --- a/tests/commands/put_object_legal_hold.sh +++ b/tests/commands/put_object_legal_hold.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash put_object_legal_hold() { + record_command "put-object-legal-hold" "client:s3api" if [[ $# -ne 3 ]]; then log 2 "'put object legal hold' command requires bucket, key, hold status ('ON' or 'OFF')" return 1 diff --git a/tests/commands/put_object_retention.sh b/tests/commands/put_object_retention.sh index 715a48c2..ab5396e8 100644 --- a/tests/commands/put_object_retention.sh +++ b/tests/commands/put_object_retention.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash put_object_retention() { + record_command "put-object-retention" "client:s3api" if [[ $# -ne 4 ]]; then log 2 "'put object retention' command requires bucket, key, retention mode, retention date" return 1 diff --git a/tests/commands/select_object_content.sh b/tests/commands/select_object_content.sh index 1ba02d7c..6c9ef3d0 100644 --- 
a/tests/commands/select_object_content.sh +++ b/tests/commands/select_object_content.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash select_object_content() { + record_command "select-object-content" "client:s3api" if [[ $# -ne 7 ]]; then log 2 "'select object content' command requires bucket, key, expression, expression type, input serialization, output serialization, outfile" return 1 diff --git a/tests/commands/upload_part_copy.sh b/tests/commands/upload_part_copy.sh index 4c08f589..1a35821c 100644 --- a/tests/commands/upload_part_copy.sh +++ b/tests/commands/upload_part_copy.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash upload_part_copy() { + record_command "upload-part-copy" "client:s3api" if [ $# -ne 5 ]; then echo "upload multipart part copy function must have bucket, key, upload ID, file name, part number" return 1 @@ -17,6 +18,7 @@ upload_part_copy() { } upload_part_copy_with_range() { + record_command "upload-part-copy" "client:s3api" if [ $# -ne 6 ]; then log 2 "upload multipart part copy function must have bucket, key, upload ID, file name, part number, range" return 1 diff --git a/tests/env.sh b/tests/env.sh index 4fadedea..f2605989 100644 --- a/tests/env.sh +++ b/tests/env.sh @@ -92,6 +92,9 @@ check_universal_vars() { if [[ -n "$DIRECT" ]]; then export DIRECT fi + if [[ -n "$COVERAGE_DB" ]]; then + export COVERAGE_DB + fi } check_versity_vars() { diff --git a/tests/report.sh b/tests/report.sh new file mode 100644 index 00000000..8b9fa88b --- /dev/null +++ b/tests/report.sh @@ -0,0 +1,86 @@ +#!/usr/bin/env bash + +check_and_create_database() { + # Define SQL commands to create a table + SQL_CREATE_TABLE="CREATE TABLE IF NOT EXISTS entries ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + command TEXT NOT NULL, + client TEXT NOT NULL, + count INTEGER DEFAULT 1, + UNIQUE(command, client) + );" + +# Execute the SQL commands to create the database and table +sqlite3 "$COVERAGE_DB" <<EOF +$SQL_CREATE_TABLE +EOF +} + +# record a command and its client type, e.g. record_command "put-object" "client:s3api" +record_command() { + if [ -z "$COVERAGE_DB" ]; then + log 5 "no coverage db set, not recording" + return 0 + fi + check_and_create_database + # add the command/client pair, or increment its count if already recorded + if ! error=$(sqlite3 "$COVERAGE_DB" "INSERT INTO entries (command, client) VALUES ('$1', '$2') ON CONFLICT(command, client) DO UPDATE SET count = count + 1;" 2>&1); then + log 2 "error in sqlite statement: $error" + fi +} + +record_result() { + if [ -z "$COVERAGE_DB" ]; then + log 5 "no coverage db set, not recording" + return 0 + fi + # Define SQL commands to create a table + SQL_CREATE_TABLE="CREATE TABLE IF NOT EXISTS results ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + command TEXT NOT NULL, + client TEXT, + count INTEGER, + pass INTEGER DEFAULT 1, + UNIQUE(command, client) + );" + # Execute the SQL commands to create the database and table + sqlite3 "$COVERAGE_DB" <<EOF +$SQL_CREATE_TABLE +EOF +} diff --git a/tests/test_aws_root.bats b/tests/test_aws_root.bats --- a/tests/test_aws_root.bats +++ b/tests/test_aws_root.bats @test "test_get_object_full_range" { - bucket_file="bucket_file" - - create_test_files "$bucket_file" || local created=$? - [[ $created -eq 0 ]] || fail "Error creating test files" - echo -n "0123456789" > "$test_file_folder/$bucket_file" - setup_bucket "s3api" "$BUCKET_ONE_NAME" || local setup_result=$?
- [[ $get_result -ne 0 ]] || fail "Get object with zero range returned no error" + test_get_object_invalid_range_aws_root } -@test "test_put_object" { - bucket_file="bucket_file" - - create_test_files "$bucket_file" || local created=$? - [[ $created -eq 0 ]] || fail "Error creating test files" - setup_bucket "s3api" "$BUCKET_ONE_NAME" || local setup_result=$? - [[ $setup_result -eq 0 ]] || fail "error setting up bucket" - setup_bucket "s3api" "$BUCKET_TWO_NAME" || local setup_result_two=$? - [[ $setup_result_two -eq 0 ]] || fail "Bucket two setup error" - put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || local copy_result=$? - [[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket" - copy_error=$(aws --no-verify-ssl s3api copy-object --copy-source "$BUCKET_ONE_NAME/$bucket_file" --key "$bucket_file" --bucket "$BUCKET_TWO_NAME" 2>&1) || local copy_result=$? - [[ $copy_result -eq 0 ]] || fail "Error copying file: $copy_error" - copy_file "s3://$BUCKET_TWO_NAME/$bucket_file" "$test_file_folder/${bucket_file}_copy" || local copy_result=$? - [[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket" - compare_files "$test_file_folder/$bucket_file" "$test_file_folder/${bucket_file}_copy" || local compare_result=$? - [[ $compare_result -eq 0 ]] || file "files don't match" - - delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" - delete_bucket_or_contents "aws" "$BUCKET_TWO_NAME" - delete_test_files "$bucket_file" +# get-object-attributes +@test "test_get_object_attributes" { + test_get_object_attributes_aws_root } -@test "test_create_bucket_invalid_name" { - if [[ $RECREATE_BUCKETS != "true" ]]; then - return - fi - - create_bucket_invalid_name "aws" || local create_result=$? - [[ $create_result -eq 0 ]] || fail "Invalid name test failed" - - [[ "$bucket_create_error" == *"Invalid bucket name "* ]] || fail "unexpected error: $bucket_create_error" +@test "test_put_object" { + test_put_object_aws_root } # test adding and removing an object on versitygw @@ -312,195 +140,26 @@ export RUN_USERS=true test_common_list_objects "aws" } - -@test "test_get_object_attributes" { - bucket_file="bucket_file" - - create_test_files "$bucket_file" || local created=$? - [[ $created -eq 0 ]] || fail "Error creating test files" - setup_bucket "s3api" "$BUCKET_ONE_NAME" || local created=$? - [[ $created -eq 0 ]] || fail "Error creating bucket" - put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || local copy_result=$? - [[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket" - get_object_attributes "$BUCKET_ONE_NAME" "$bucket_file" || local get_result=$? 
- [[ $get_result -eq 0 ]] || fail "failed to get object attributes" - # shellcheck disable=SC2154 - if echo "$attributes" | jq -e 'has("ObjectSize")'; then - object_size=$(echo "$attributes" | jq ".ObjectSize") - [[ $object_size == 0 ]] || fail "Incorrect object size: $object_size" - else - fail "ObjectSize parameter missing: $attributes" - fi - delete_bucket_or_contents "s3api" "$BUCKET_ONE_NAME" -} - #@test "test_get_put_object_legal_hold" { -# # bucket must be created with lock for legal hold -# if [[ $RECREATE_BUCKETS == false ]]; then -# return -# fi -# -# bucket_file="bucket_file" -# username="ABCDEFG" -# password="HIJKLMN" -# -# legal_hold_retention_setup "$username" "$password" "$bucket_file" -# -# get_object_lock_configuration "$BUCKET_ONE_NAME" || fail "error getting lock configuration" -# # shellcheck disable=SC2154 -# log 5 "$lock_config" -# enabled=$(echo "$lock_config" | jq -r ".ObjectLockConfiguration.ObjectLockEnabled") -# [[ $enabled == "Enabled" ]] || fail "ObjectLockEnabled should be 'Enabled', is '$enabled'" -# -# put_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" "ON" || fail "error putting legal hold on object" -# get_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" || fail "error getting object legal hold status" -# # shellcheck disable=SC2154 -# log 5 "$legal_hold" -# hold_status=$(echo "$legal_hold" | grep -v "InsecureRequestWarning" | jq -r ".LegalHold.Status" 2>&1) || fail "error obtaining hold status: $hold_status" -# [[ $hold_status == "ON" ]] || fail "Status should be 'ON', is '$hold_status'" -# -# echo "fdkljafajkfs" > "$test_file_folder/$bucket_file" -# if put_object_with_user "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$password"; then -# fail "able to overwrite object with hold" -# fi -# # shellcheck disable=SC2154 -# #[[ $put_object_error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $put_object_error" -# -# if delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$password"; then -# fail "able to delete object with hold" -# fi -# # shellcheck disable=SC2154 -# [[ $delete_object_error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $delete_object_error" -# put_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" "OFF" || fail "error removing legal hold on object" -# delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$password" || fail "error deleting object after removing legal hold" -# -# delete_bucket_recursive "s3api" "$BUCKET_ONE_NAME" +# test_get_put_object_legal_hold_aws_root #} #@test "test_get_put_object_retention" { -# # bucket must be created with lock for legal hold -# if [[ $RECREATE_BUCKETS == false ]]; then -# return -# fi -# -# bucket_file="bucket_file" -# username="ABCDEFG" -# secret_key="HIJKLMN" -# -# legal_hold_retention_setup "$username" "$secret_key" "$bucket_file" -# -# get_object_lock_configuration "$BUCKET_ONE_NAME" || fail "error getting lock configuration" -# log 5 "$lock_config" -# enabled=$(echo "$lock_config" | jq -r ".ObjectLockConfiguration.ObjectLockEnabled") -# [[ $enabled == "Enabled" ]] || fail "ObjectLockEnabled should be 'Enabled', is '$enabled'" -# -# if [[ "$OSTYPE" == "darwin"* ]]; then -# retention_date=$(date -v+2d +"%Y-%m-%dT%H:%M:%S") -# else -# retention_date=$(date -d "+2 days" +"%Y-%m-%dT%H:%M:%S") -# fi -# put_object_retention "$BUCKET_ONE_NAME" "$bucket_file" "GOVERNANCE" "$retention_date" || fail "failed to 
add object retention" -# get_object_retention "$BUCKET_ONE_NAME" "$bucket_file" || fail "failed to get object retention" -# log 5 "$retention" -# retention=$(echo "$retention" | grep -v "InsecureRequestWarning") -# mode=$(echo "$retention" | jq -r ".Retention.Mode") -# retain_until_date=$(echo "$retention" | jq -r ".Retention.RetainUntilDate") -# [[ $mode == "GOVERNANCE" ]] || fail "retention mode should be governance, is $mode" -# [[ $retain_until_date == "$retention_date"* ]] || fail "retain until date should be $retention_date, is $retain_until_date" -# -# echo "fdkljafajkfs" > "$test_file_folder/$bucket_file" -# put_object_with_user "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$secret_key" || local put_result=$? -# [[ $put_result -ne 0 ]] || fail "able to overwrite object with hold" -# [[ $error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $error" -# -# delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$secret_key" || local delete_result=$? -# [[ $delete_result -ne 0 ]] || fail "able to delete object with hold" -# [[ $error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $error" -# -# delete_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error deleting object" -# delete_bucket_recursive "s3api" "$BUCKET_ONE_NAME" +# test_get_put_object_retention_aws_root #} -legal_hold_retention_setup() { - [[ $# -eq 3 ]] || fail "legal hold or retention setup requires username, secret key, bucket file" - - delete_bucket_or_contents_if_exists "s3api" "$BUCKET_ONE_NAME" || fail "error deleting bucket, or checking for existence" - setup_user "$1" "$2" "user" || fail "error creating user if nonexistent" - create_test_files "$3" || fail "error creating test files" - - #create_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error creating bucket" - create_bucket_object_lock_enabled "$BUCKET_ONE_NAME" || fail "error creating bucket" - change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$1" || fail "error changing bucket ownership" - get_bucket_policy "s3api" "$BUCKET_ONE_NAME" || fail "error getting bucket policy" - log 5 "POLICY: $bucket_policy" - get_bucket_owner "$BUCKET_ONE_NAME" - log 5 "owner: $bucket_owner" - #put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred" || fail "error putting bucket ownership controls" - put_object_with_user "s3api" "$test_file_folder/$3" "$BUCKET_ONE_NAME" "$3" "$1" "$2" || fail "failed to add object to bucket" -} - @test "test_put_bucket_acl" { test_common_put_bucket_acl "s3api" } # test v1 s3api list objects command @test "test-s3api-list-objects-v1" { - local object_one="test-file-one" - local object_two="test-file-two" - local object_two_data="test data\n" - - create_test_files "$object_one" "$object_two" || local created=$? - [[ $created -eq 0 ]] || fail "Error creating test files" - printf "%s" "$object_two_data" > "$test_file_folder"/"$object_two" - setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$? - [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'" - put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || local copy_result_one=$? - [[ $copy_result_one -eq 0 ]] || fail "Failed to add object $object_one" - put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local copy_result_two=$? 
- [[ $copy_result_two -eq 0 ]] || fail "Failed to add object $object_two" - - list_objects_s3api_v1 "$BUCKET_ONE_NAME" - key_one=$(echo "$objects" | jq -r '.Contents[0].Key') - [[ $key_one == "$object_one" ]] || fail "Object one mismatch ($key_one, $object_one)" - size_one=$(echo "$objects" | jq -r '.Contents[0].Size') - [[ $size_one -eq 0 ]] || fail "Object one size mismatch ($size_one, 0)" - key_two=$(echo "$objects" | jq -r '.Contents[1].Key') - [[ $key_two == "$object_two" ]] || fail "Object two mismatch ($key_two, $object_two)" - size_two=$(echo "$objects" | jq '.Contents[1].Size') - [[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch ($size_two, ${#object_two_data})" - - delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" - delete_test_files "$object_one" "$object_two" + test_s3api_list_objects_v1_aws_root } # test v2 s3api list objects command @test "test-s3api-list-objects-v2" { - local object_one="test-file-one" - local object_two="test-file-two" - local object_two_data="test data\n" - - create_test_files "$object_one" "$object_two" || local created=$? - [[ $created -eq 0 ]] || fail "Error creating test files" - printf "%s" "$object_two_data" > "$test_file_folder"/"$object_two" - setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$? - [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'" - put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || local copy_object_one=$? - [[ $copy_object_one -eq 0 ]] || fail "Failed to add object $object_one" - put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local copy_object_two=$? - [[ $copy_object_two -eq 0 ]] || fail "Failed to add object $object_two" - - list_objects_s3api_v2 "$BUCKET_ONE_NAME" - key_one=$(echo "$objects" | jq -r '.Contents[0].Key') - [[ $key_one == "$object_one" ]] || fail "Object one mismatch ($key_one, $object_one)" - size_one=$(echo "$objects" | jq -r '.Contents[0].Size') - [[ $size_one -eq 0 ]] || fail "Object one size mismatch ($size_one, 0)" - key_two=$(echo "$objects" | jq -r '.Contents[1].Key') - [[ $key_two == "$object_two" ]] || fail "Object two mismatch ($key_two, $object_two)" - size_two=$(echo "$objects" | jq -r '.Contents[1].Size') - [[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch ($size_two, ${#object_two_data})" - - delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" - delete_test_files "$object_one" "$object_two" + test_s3api_list_objects_v2_aws_root } # test abilty to set and retrieve object tags @@ -510,45 +169,7 @@ legal_hold_retention_setup() { # test multi-part upload list parts command @test "test-multipart-upload-list-parts" { - local bucket_file="bucket-file" - - create_test_files "$bucket_file" || fail "error creating test file" - dd if=/dev/urandom of="$test_file_folder/$bucket_file" bs=5M count=1 || fail "error creating test file" - setup_bucket "aws" "$BUCKET_ONE_NAME" || fail "failed to create bucket '$BUCKET_ONE_NAME'" - - list_parts "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || fail "listing multipart upload parts failed" - - declare -a parts_map - # shellcheck disable=SC2154 - log 5 "parts: $parts" - for i in {0..3}; do - local part_number - local etag - # shellcheck disable=SC2154 - part=$(echo "$parts" | grep -v "InsecureRequestWarning" | jq -r ".[$i]" 2>&1) || fail "error getting part: $part" - part_number=$(echo "$part" | jq ".PartNumber" 2>&1) || fail "error parsing part number: $part_number" - [[ $part_number != "" ]] || 
fail "error: blank part number" - - etag=$(echo "$part" | jq ".ETag" 2>&1) || fail "error parsing etag: $etag" - [[ $etag != "" ]] || fail "error: blank etag" - # shellcheck disable=SC2004 - parts_map[$part_number]=$etag - done - [[ ${#parts_map[@]} -ne 0 ]] || fail "error loading multipart upload parts to check" - - for i in {0..3}; do - local part_number - local etag - # shellcheck disable=SC2154 - listed_part=$(echo "$listed_parts" | grep -v "InsecureRequestWarning" | jq -r ".Parts[$i]" 2>&1) || fail "error parsing listed part: $listed_part" - part_number=$(echo "$listed_part" | jq ".PartNumber" 2>&1) || fail "error parsing listed part number: $part_number" - etag=$(echo "$listed_part" | jq ".ETag" 2>&1) || fail "error getting listed etag: $etag" - [[ ${parts_map[$part_number]} == "$etag" ]] || fail "error: etags don't match (part number: $part_number, etags ${parts_map[$part_number]},$etag)" - done - - run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder/$bucket_file" 4 - delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" - delete_test_files $bucket_file + test_multipart_upload_list_parts_aws_root } # test listing of active uploads @@ -584,18 +205,14 @@ legal_hold_retention_setup() { @test "test-multipart-upload-from-bucket" { local bucket_file="bucket-file" - create_test_files "$bucket_file" || local created=$? - [[ $created -eq 0 ]] || fail "Error creating test files" - dd if=/dev/urandom of="$test_file_folder/$bucket_file" bs=5M count=1 || fail "error creating test file" - setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$? - [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'" + create_test_files "$bucket_file" || fail "error creating test files" + dd if=/dev/urandom of="$test_file_folder/$bucket_file" bs=5M count=1 || fail "error adding data to test file" + setup_bucket "aws" "$BUCKET_ONE_NAME" || fail "failed to create bucket: $BUCKET_ONE_NAME" - multipart_upload_from_bucket "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || upload_result=$? - [[ $upload_result -eq 0 ]] || fail "Error performing multipart upload" + multipart_upload_from_bucket "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || fail "error performing multipart upload" - get_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file-copy" "$test_file_folder/$bucket_file-copy" - compare_files "$test_file_folder"/$bucket_file-copy "$test_file_folder"/$bucket_file || compare_result=$? 
- [[ $compare_result -eq 0 ]] || fail "Data doesn't match" + get_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file-copy" "$test_file_folder/$bucket_file-copy" || fail "error getting object" + compare_files "$test_file_folder"/$bucket_file-copy "$test_file_folder"/$bucket_file || fail "data doesn't match" delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" delete_test_files $bucket_file @@ -1230,11 +847,6 @@ EOF # test_common_list_objects_file_count "aws" #} -#@test "test_filename_length" { -# file_name=$(printf "%0.sa" $(seq 1 1025)) -# echo "$file_name" - - # ensure that lists of files greater than a size of 1000 (pagination) are returned properly #@test "test_list_objects_file_count" { # test_common_list_objects_file_count "aws" diff --git a/tests/test_aws_root_inner.sh b/tests/test_aws_root_inner.sh new file mode 100755 index 00000000..e8a827f0 --- /dev/null +++ b/tests/test_aws_root_inner.sh @@ -0,0 +1,440 @@ +#!/usr/bin/env bats + +source ./tests/commands/delete_objects.sh + +test_abort_multipart_upload_aws_root() { + local bucket_file="bucket-file" + + create_test_files "$bucket_file" || fail "error creating test files" + # shellcheck disable=SC2154 + dd if=/dev/urandom of="$test_file_folder/$bucket_file" bs=5M count=1 || fail "error creating test file" + + setup_bucket "aws" "$BUCKET_ONE_NAME" || fail "Failed to create bucket '$BUCKET_ONE_NAME'" + + run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || fail "abort failed" + + if object_exists "aws" "$BUCKET_ONE_NAME" "$bucket_file"; then + fail "Upload file exists after abort" + fi + + delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" + delete_test_files $bucket_file +} + +test_complete_multipart_upload_aws_root() { + local bucket_file="bucket-file" + + create_test_files "$bucket_file" || fail "error creating test files" + dd if=/dev/urandom of="$test_file_folder/$bucket_file" bs=5M count=1 || fail "error creating test file" + + setup_bucket "aws" "$BUCKET_ONE_NAME" || fail "failed to create bucket '$BUCKET_ONE_NAME'" + + multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || fail "error performing multipart upload" + + download_and_compare_file "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder/$bucket_file-copy" || fail "error downloading and comparing file" + + delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" + delete_test_files $bucket_file +} + +test_create_multipart_upload_properties_aws_root() { + local bucket_file="bucket-file" + + local expected_content_type="application/zip" + local expected_meta_key="testKey" + local expected_meta_val="testValue" + local expected_hold_status="ON" + local expected_retention_mode="GOVERNANCE" + local expected_tag_key="TestTag" + local expected_tag_val="TestTagVal" + local five_seconds_later + + os_name="$(uname)" + if [[ "$os_name" == "Darwin" ]]; then + now=$(date -u +"%Y-%m-%dT%H:%M:%S") + later=$(date -j -v +15S -f "%Y-%m-%dT%H:%M:%S" "$now" +"%Y-%m-%dT%H:%M:%S") + else + now=$(date +"%Y-%m-%dT%H:%M:%S") + later=$(date -d "$now 15 seconds" +"%Y-%m-%dT%H:%M:%S") + fi + + create_test_files "$bucket_file" || fail "error creating test file" + dd if=/dev/urandom of="$test_file_folder/$bucket_file" bs=5M count=1 || fail "error creating test file" + + delete_bucket_or_contents_if_exists "s3api" "$BUCKET_ONE_NAME" || fail "error deleting bucket, or checking for existence" + # in static bucket config, bucket will still exist + bucket_exists "s3api" "$BUCKET_ONE_NAME" 
|| local exists_result=$? + [[ $exists_result -ne 2 ]] || fail "error checking for bucket existence" + if [[ $exists_result -eq 1 ]]; then + create_bucket_object_lock_enabled "$BUCKET_ONE_NAME" || fail "error creating bucket" + fi + get_object_lock_configuration "$BUCKET_ONE_NAME" || fail "error getting lock config" + # shellcheck disable=SC2154 + log 5 "LOCK CONFIG: $lock_config" + + log 5 "LATER: $later" + multipart_upload_with_params "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 \ + "$expected_content_type" \ + "{\"$expected_meta_key\": \"$expected_meta_val\"}" \ + "$expected_hold_status" \ + "$expected_retention_mode" \ + "$later" \ + "$expected_tag_key=$expected_tag_val" || fail "error performing multipart upload" + + head_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error getting metadata" + # shellcheck disable=SC2154 + raw_metadata=$(echo "$metadata" | grep -v "InsecureRequestWarning") + log 5 "raw metadata: $raw_metadata" + + content_type=$(echo "$raw_metadata" | jq -r ".ContentType") + [[ $content_type == "$expected_content_type" ]] || fail "content type mismatch ($content_type, $expected_content_type)" + meta_val=$(echo "$raw_metadata" | jq -r ".Metadata.$expected_meta_key") + [[ $meta_val == "$expected_meta_val" ]] || fail "metadata val mismatch ($meta_val, $expected_meta_val)" + hold_status=$(echo "$raw_metadata" | jq -r ".ObjectLockLegalHoldStatus") + [[ $hold_status == "$expected_hold_status" ]] || fail "hold status mismatch ($hold_status, $expected_hold_status)" + retention_mode=$(echo "$raw_metadata" | jq -r ".ObjectLockMode") + [[ $retention_mode == "$expected_retention_mode" ]] || fail "retention mode mismatch ($retention_mode, $expected_retention_mode)" + retain_until_date=$(echo "$raw_metadata" | jq -r ".ObjectLockRetainUntilDate") + [[ $retain_until_date == "$later"* ]] || fail "retention date mismatch ($retain_until_date, $later)" + + get_object_tagging "aws" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error getting tagging" + # shellcheck disable=SC2154 + log 5 "tags: $tags" + tag_key=$(echo "$tags" | jq -r ".TagSet[0].Key") + [[ $tag_key == "$expected_tag_key" ]] || fail "tag mismatch ($tag_key, $expected_tag_key)" + tag_val=$(echo "$tags" | jq -r ".TagSet[0].Value") + [[ $tag_val == "$expected_tag_val" ]] || fail "tag mismatch ($tag_val, $expected_tag_val)" + + put_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" "OFF" || fail "error disabling legal hold" + head_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error getting metadata" + + get_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder/$bucket_file-copy" || fail "error getting object" + compare_files "$test_file_folder/$bucket_file" "$test_file_folder/$bucket_file-copy" || fail "files not equal" + + sleep 15 + + delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" + delete_test_files $bucket_file +} + +test_delete_objects_aws_root() { + local object_one="test-file-one" + local object_two="test-file-two" + + create_test_files "$object_one" "$object_two" || fail "error creating test files" + setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error creating bucket" + + put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || fail "error adding object one" + put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || fail "error adding object two" + + delete_objects "$BUCKET_ONE_NAME" "$object_one" "$object_two" || fail "error deleting objects" + + object_exists "s3api" 
"$BUCKET_ONE_NAME" "$object_one" || local object_one_exists_result=$? + [[ $object_one_exists_result -eq 1 ]] || fail "object $object_one not deleted" + object_exists "s3api" "$BUCKET_ONE_NAME" "$object_two" || local object_two_exists_result=$? + [[ $object_two_exists_result -eq 1 ]] || fail "object $object_two not deleted" + + delete_bucket_or_contents "s3api" "$BUCKET_ONE_NAME" + delete_test_files "$object_one" "$object_two" +} + +test_get_bucket_acl_aws_root() { + setup_bucket "aws" "$BUCKET_ONE_NAME" || local created=$? + [[ $created -eq 0 ]] || fail "Error creating bucket" + + get_bucket_acl "s3api" "$BUCKET_ONE_NAME" || local result=$? + [[ $result -eq 0 ]] || fail "Error retrieving acl" + + # shellcheck disable=SC2154 + id=$(echo "$acl" | grep -v "InsecureRequestWarning" | jq '.Owner.ID') + [[ $id == '"'"$AWS_ACCESS_KEY_ID"'"' ]] || fail "Acl mismatch" + + delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" +} + +test_get_object_full_range_aws_root() { + bucket_file="bucket_file" + + create_test_files "$bucket_file" || local created=$? + [[ $created -eq 0 ]] || fail "Error creating test files" + echo -n "0123456789" > "$test_file_folder/$bucket_file" + setup_bucket "s3api" "$BUCKET_ONE_NAME" || local setup_result=$? + [[ $setup_result -eq 0 ]] || fail "error setting up bucket" + put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error putting object" + get_object_with_range "$BUCKET_ONE_NAME" "$bucket_file" "bytes=9-15" "$test_file_folder/$bucket_file-range" || fail "error getting range" + [[ "$(cat "$test_file_folder/$bucket_file-range")" == "9" ]] || fail "byte range not copied properly" +} + +test_get_object_invalid_range_aws_root() { + bucket_file="bucket_file" + + create_test_files "$bucket_file" || local created=$? + [[ $created -eq 0 ]] || fail "Error creating test files" + setup_bucket "s3api" "$BUCKET_ONE_NAME" || local setup_result=$? + [[ $setup_result -eq 0 ]] || fail "error setting up bucket" + put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error putting object" + get_object_with_range "$BUCKET_ONE_NAME" "$bucket_file" "bytes=0-0" "$test_file_folder/$bucket_file-range" || local get_result=$? + [[ $get_result -ne 0 ]] || fail "Get object with zero range returned no error" +} + +test_put_object_aws_root() { + bucket_file="bucket_file" + + create_test_files "$bucket_file" || local created=$? + [[ $created -eq 0 ]] || fail "Error creating test files" + setup_bucket "s3api" "$BUCKET_ONE_NAME" || local setup_result=$? + [[ $setup_result -eq 0 ]] || fail "error setting up bucket" + setup_bucket "s3api" "$BUCKET_TWO_NAME" || local setup_result_two=$? + [[ $setup_result_two -eq 0 ]] || fail "Bucket two setup error" + put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || local copy_result=$? + [[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket" + copy_error=$(aws --no-verify-ssl s3api copy-object --copy-source "$BUCKET_ONE_NAME/$bucket_file" --key "$bucket_file" --bucket "$BUCKET_TWO_NAME" 2>&1) || local copy_result=$? + [[ $copy_result -eq 0 ]] || fail "Error copying file: $copy_error" + copy_file "s3://$BUCKET_TWO_NAME/$bucket_file" "$test_file_folder/${bucket_file}_copy" || local copy_result=$? + [[ $copy_result -eq 0 ]] || fail "Failed to add object to bucket" + compare_files "$test_file_folder/$bucket_file" "$test_file_folder/${bucket_file}_copy" || local compare_result=$? 
+ [[ $compare_result -eq 0 ]] || fail "files don't match" + + delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME" + delete_bucket_or_contents "aws" "$BUCKET_TWO_NAME" + delete_test_files "$bucket_file" +} + +test_create_bucket_invalid_name_aws_root() { + if [[ $RECREATE_BUCKETS != "true" ]]; then + return + fi + + create_bucket_invalid_name "aws" || local create_result=$? + [[ $create_result -eq 0 ]] || fail "Invalid name test failed" + + # shellcheck disable=SC2154 + [[ "$bucket_create_error" == *"Invalid bucket name "* ]] || fail "unexpected error: $bucket_create_error" +} + +test_get_object_attributes_aws_root() { + bucket_file="bucket_file" + + create_test_files "$bucket_file" || fail "error creating test files" + setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error setting up bucket" + put_object "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" || fail "failed to add object to bucket" + get_object_attributes "$BUCKET_ONE_NAME" "$bucket_file" || fail "failed to get object attributes" + # shellcheck disable=SC2154 + has_object_size=$(echo "$attributes" | jq -e '.ObjectSize' 2>&1) || fail "error checking for ObjectSize parameters: $has_object_size" + if [[ $has_object_size -eq 0 ]]; then + object_size=$(echo "$attributes" | jq -r ".ObjectSize") + [[ $object_size == 0 ]] || fail "Incorrect object size: $object_size" + else + fail "ObjectSize parameter missing: $attributes" + fi + delete_bucket_or_contents "s3api" "$BUCKET_ONE_NAME" +} + +test_get_put_object_legal_hold_aws_root() { + # bucket must be created with lock for legal hold + if [[ $RECREATE_BUCKETS == false ]]; then + return + fi + + bucket_file="bucket_file" + username="ABCDEFG" + password="HIJKLMN" + + legal_hold_retention_setup "$username" "$password" "$bucket_file" + + get_object_lock_configuration "$BUCKET_ONE_NAME" || fail "error getting lock configuration" + # shellcheck disable=SC2154 + log 5 "$lock_config" + enabled=$(echo "$lock_config" | jq -r ".ObjectLockConfiguration.ObjectLockEnabled") + [[ $enabled == "Enabled" ]] || fail "ObjectLockEnabled should be 'Enabled', is '$enabled'" + + put_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" "ON" || fail "error putting legal hold on object" + get_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" || fail "error getting object legal hold status" + # shellcheck disable=SC2154 + log 5 "$legal_hold" + hold_status=$(echo "$legal_hold" | grep -v "InsecureRequestWarning" | jq -r ".LegalHold.Status" 2>&1) || fail "error obtaining hold status: $hold_status" + [[ $hold_status == "ON" ]] || fail "Status should be 'ON', is '$hold_status'" + + echo "fdkljafajkfs" > "$test_file_folder/$bucket_file" + if put_object_with_user "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$password"; then + fail "able to overwrite object with hold" + fi + # shellcheck disable=SC2154 + #[[ $put_object_error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $put_object_error" + + if delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$password"; then + fail "able to delete object with hold" + fi + # shellcheck disable=SC2154 + [[ $delete_object_error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $delete_object_error" + put_object_legal_hold "$BUCKET_ONE_NAME" "$bucket_file" "OFF" || fail "error removing legal hold on object" + delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$password" 
|| fail "error deleting object after removing legal hold" + + delete_bucket_recursive "s3api" "$BUCKET_ONE_NAME" +} + +test_get_put_object_retention_aws_root() { + # bucket must be created with lock for legal hold + if [[ $RECREATE_BUCKETS == false ]]; then + return + fi + + bucket_file="bucket_file" + username="ABCDEFG" + secret_key="HIJKLMN" + + legal_hold_retention_setup "$username" "$secret_key" "$bucket_file" + + get_object_lock_configuration "$BUCKET_ONE_NAME" || fail "error getting lock configuration" + log 5 "$lock_config" + enabled=$(echo "$lock_config" | jq -r ".ObjectLockConfiguration.ObjectLockEnabled") + [[ $enabled == "Enabled" ]] || fail "ObjectLockEnabled should be 'Enabled', is '$enabled'" + + if [[ "$OSTYPE" == "darwin"* ]]; then + retention_date=$(date -v+2d +"%Y-%m-%dT%H:%M:%S") + else + retention_date=$(date -d "+2 days" +"%Y-%m-%dT%H:%M:%S") + fi + put_object_retention "$BUCKET_ONE_NAME" "$bucket_file" "GOVERNANCE" "$retention_date" || fail "failed to add object retention" + get_object_retention "$BUCKET_ONE_NAME" "$bucket_file" || fail "failed to get object retention" + log 5 "$retention" + retention=$(echo "$retention" | grep -v "InsecureRequestWarning") + mode=$(echo "$retention" | jq -r ".Retention.Mode") + retain_until_date=$(echo "$retention" | jq -r ".Retention.RetainUntilDate") + [[ $mode == "GOVERNANCE" ]] || fail "retention mode should be governance, is $mode" + [[ $retain_until_date == "$retention_date"* ]] || fail "retain until date should be $retention_date, is $retain_until_date" + + echo "fdkljafajkfs" > "$test_file_folder/$bucket_file" + put_object_with_user "s3api" "$test_file_folder/$bucket_file" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$secret_key" || local put_result=$? + [[ $put_result -ne 0 ]] || fail "able to overwrite object with hold" + # shellcheck disable=SC2154 + [[ $error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $error" + + delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$bucket_file" "$username" "$secret_key" || local delete_result=$? 
+  [[ $delete_result -ne 0 ]] || fail "able to delete object with hold"
+  [[ $error == *"Object is WORM protected and cannot be overwritten"* ]] || fail "unexpected error message: $error"
+
+  delete_object "s3api" "$BUCKET_ONE_NAME" "$bucket_file" || fail "error deleting object"
+  delete_bucket_recursive "s3api" "$BUCKET_ONE_NAME"
+}
+
+legal_hold_retention_setup() {
+  [[ $# -eq 3 ]] || fail "legal hold or retention setup requires username, secret key, bucket file"
+
+  delete_bucket_or_contents_if_exists "s3api" "$BUCKET_ONE_NAME" || fail "error deleting bucket, or checking for existence"
+  setup_user "$1" "$2" "user" || fail "error creating user if nonexistent"
+  create_test_files "$3" || fail "error creating test files"
+
+  #create_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error creating bucket"
+  create_bucket_object_lock_enabled "$BUCKET_ONE_NAME" || fail "error creating bucket"
+  change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$1" || fail "error changing bucket ownership"
+  get_bucket_policy "s3api" "$BUCKET_ONE_NAME" || fail "error getting bucket policy"
+  # shellcheck disable=SC2154
+  log 5 "POLICY: $bucket_policy"
+  get_bucket_owner "$BUCKET_ONE_NAME"
+  # shellcheck disable=SC2154
+  log 5 "owner: $bucket_owner"
+  #put_bucket_ownership_controls "$BUCKET_ONE_NAME" "BucketOwnerPreferred" || fail "error putting bucket ownership controls"
+  put_object_with_user "s3api" "$test_file_folder/$3" "$BUCKET_ONE_NAME" "$3" "$1" "$2" || fail "failed to add object to bucket"
+}
+
+test_s3api_list_objects_v1_aws_root() {
+  local object_one="test-file-one"
+  local object_two="test-file-two"
+  local object_two_data="test data\n"
+
+  create_test_files "$object_one" "$object_two" || local created=$?
+  [[ $created -eq 0 ]] || fail "Error creating test files"
+  printf "%s" "$object_two_data" > "$test_file_folder"/"$object_two"
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
+  [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
+  put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || local copy_result_one=$?
+  [[ $copy_result_one -eq 0 ]] || fail "Failed to add object $object_one"
+  put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local copy_result_two=$?
+  [[ $copy_result_two -eq 0 ]] || fail "Failed to add object $object_two"
+
+  list_objects_s3api_v1 "$BUCKET_ONE_NAME"
+  # shellcheck disable=SC2154
+  key_one=$(echo "$objects" | jq -r '.Contents[0].Key')
+  [[ $key_one == "$object_one" ]] || fail "Object one mismatch ($key_one, $object_one)"
+  size_one=$(echo "$objects" | jq -r '.Contents[0].Size')
+  [[ $size_one -eq 0 ]] || fail "Object one size mismatch ($size_one, 0)"
+  key_two=$(echo "$objects" | jq -r '.Contents[1].Key')
+  [[ $key_two == "$object_two" ]] || fail "Object two mismatch ($key_two, $object_two)"
+  size_two=$(echo "$objects" | jq '.Contents[1].Size')
+  [[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch ($size_two, ${#object_two_data})"
+
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
+  delete_test_files "$object_one" "$object_two"
+}
+
+test_s3api_list_objects_v2_aws_root() {
+  local object_one="test-file-one"
+  local object_two="test-file-two"
+  local object_two_data="test data\n"
+
+  create_test_files "$object_one" "$object_two" || local created=$?
+  [[ $created -eq 0 ]] || fail "Error creating test files"
+  printf "%s" "$object_two_data" > "$test_file_folder"/"$object_two"
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || local result=$?
+  [[ $result -eq 0 ]] || fail "Failed to create bucket '$BUCKET_ONE_NAME'"
+  put_object "s3api" "$test_file_folder"/"$object_one" "$BUCKET_ONE_NAME" "$object_one" || local copy_object_one=$?
+  [[ $copy_object_one -eq 0 ]] || fail "Failed to add object $object_one"
+  put_object "s3api" "$test_file_folder"/"$object_two" "$BUCKET_ONE_NAME" "$object_two" || local copy_object_two=$?
+  [[ $copy_object_two -eq 0 ]] || fail "Failed to add object $object_two"
+
+  list_objects_s3api_v2 "$BUCKET_ONE_NAME"
+  key_one=$(echo "$objects" | jq -r '.Contents[0].Key')
+  [[ $key_one == "$object_one" ]] || fail "Object one mismatch ($key_one, $object_one)"
+  size_one=$(echo "$objects" | jq -r '.Contents[0].Size')
+  [[ $size_one -eq 0 ]] || fail "Object one size mismatch ($size_one, 0)"
+  key_two=$(echo "$objects" | jq -r '.Contents[1].Key')
+  [[ $key_two == "$object_two" ]] || fail "Object two mismatch ($key_two, $object_two)"
+  size_two=$(echo "$objects" | jq -r '.Contents[1].Size')
+  [[ $size_two -eq ${#object_two_data} ]] || fail "Object two size mismatch ($size_two, ${#object_two_data})"
+
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
+  delete_test_files "$object_one" "$object_two"
+}
+
+test_multipart_upload_list_parts_aws_root() {
+  local bucket_file="bucket-file"
+
+  create_test_files "$bucket_file" || fail "error creating test file"
+  dd if=/dev/urandom of="$test_file_folder/$bucket_file" bs=5M count=1 || fail "error creating test file"
+  setup_bucket "aws" "$BUCKET_ONE_NAME" || fail "failed to create bucket '$BUCKET_ONE_NAME'"
+
+  list_parts "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder"/"$bucket_file" 4 || fail "listing multipart upload parts failed"
+
+  declare -a parts_map
+  # shellcheck disable=SC2154
+  log 5 "parts: $parts"
+  for i in {0..3}; do
+    local part_number
+    local etag
+    # shellcheck disable=SC2154
+    part=$(echo "$parts" | grep -v "InsecureRequestWarning" | jq -r ".[$i]" 2>&1) || fail "error getting part: $part"
+    part_number=$(echo "$part" | jq ".PartNumber" 2>&1) || fail "error parsing part number: $part_number"
+    [[ $part_number != "" ]] || fail "error: blank part number"
+
+    etag=$(echo "$part" | jq ".ETag" 2>&1) || fail "error parsing etag: $etag"
+    [[ $etag != "" ]] || fail "error: blank etag"
+    # shellcheck disable=SC2004
+    parts_map[$part_number]=$etag
+  done
+  [[ ${#parts_map[@]} -ne 0 ]] || fail "error loading multipart upload parts to check"
+
+  for i in {0..3}; do
+    local part_number
+    local etag
+    # shellcheck disable=SC2154
+    listed_part=$(echo "$listed_parts" | grep -v "InsecureRequestWarning" | jq -r ".Parts[$i]" 2>&1) || fail "error parsing listed part: $listed_part"
+    part_number=$(echo "$listed_part" | jq ".PartNumber" 2>&1) || fail "error parsing listed part number: $part_number"
+    etag=$(echo "$listed_part" | jq ".ETag" 2>&1) || fail "error getting listed etag: $etag"
+    [[ ${parts_map[$part_number]} == "$etag" ]] || fail "error: etags don't match (part number: $part_number, etags ${parts_map[$part_number]},$etag)"
+  done
+
+  run_then_abort_multipart_upload "$BUCKET_ONE_NAME" "$bucket_file" "$test_file_folder/$bucket_file" 4
+  delete_bucket_or_contents "aws" "$BUCKET_ONE_NAME"
+  delete_test_files "$bucket_file"
+}
diff --git a/tests/test_common.sh b/tests/test_common.sh
index e247fb97..d4cbbb22 100644
--- a/tests/test_common.sh
+++ b/tests/test_common.sh
@@ -5,6 +5,7 @@
 source ./tests/util.sh
 source ./tests/util_file.sh
 source ./tests/util_policy.sh
 source ./tests/commands/copy_object.sh
+source ./tests/commands/delete_bucket_tagging.sh
 source ./tests/commands/delete_object_tagging.sh
 source ./tests/commands/get_bucket_acl.sh
 source ./tests/commands/get_bucket_location.sh
@@ -282,7 +283,7 @@ test_common_set_get_delete_bucket_tags() {
     [[ $tag_set_key == "$key" ]] || fail "Key mismatch"
     [[ $tag_set_value == "$value" ]] || fail "Value mismatch"
   fi
-  delete_bucket_tags "$1" "$BUCKET_ONE_NAME"
+  delete_bucket_tagging "$1" "$BUCKET_ONE_NAME"

   get_bucket_tagging "$1" "$BUCKET_ONE_NAME" || fail "Error getting bucket tags third time"
diff --git a/tests/test_user_aws.sh b/tests/test_user_aws.sh
index 5e6a8340..d3f56baa 100755
--- a/tests/test_user_aws.sh
+++ b/tests/test_user_aws.sh
@@ -2,6 +2,8 @@
 source ./tests/test_user_common.sh
 source ./tests/util_users.sh
+source ./tests/commands/get_object.sh
+source ./tests/commands/put_object.sh

 export RUN_USERS=true

@@ -26,3 +28,88 @@ export RUN_USERS=true
 @test "test_userplus_operation_aws" {
   test_userplus_operation "aws"
 }
+
+@test "test_user_get_object" {
+  username="ABCDEFG"
+  password="HIJKLMN"
+  test_file="test_file"
+
+  setup_user "$username" "$password" "user" || fail "error creating user if nonexistent"
+  create_test_files "$test_file" || fail "error creating test files"
+
+  setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error setting up bucket"
+  if get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$test_file_folder/$test_file-copy" "$username" "$password"; then
+    fail "able to get object despite not being bucket owner"
+  fi
+  change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username" || fail "error changing bucket ownership"
+  put_object "s3api" "$test_file_folder/$test_file" "$BUCKET_ONE_NAME" "$test_file" || fail "failed to add object to bucket"
+  get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$test_file_folder/$test_file-copy" "$username" "$password" || fail "error getting object"
+}
+
+@test "test_userplus_get_object" {
+  username="ABCDEFG"
+  password="HIJKLMN"
+  test_file="test_file"
+
+  setup_user "$username" "$password" "admin" || fail "error creating user if nonexistent"
+  create_test_files "$test_file" || fail "error creating test files"
+
+  setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error setting up bucket"
+  if get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$test_file_folder/$test_file-copy" "$username" "$password"; then
+    fail "able to get object despite not being bucket owner"
+  fi
+  change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username" || fail "error changing bucket ownership"
+  put_object "s3api" "$test_file_folder/$test_file" "$BUCKET_ONE_NAME" "$test_file" || fail "failed to add object to bucket"
+  get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$test_file_folder/$test_file-copy" "$username" "$password" || fail "error getting object"
+}
+
+@test "test_user_delete_object" {
+  username="ABCDEFG"
+  password="HIJKLMN"
+  test_file="test_file"
+
+  setup_user "$username" "$password" "user" || fail "error creating user if nonexistent"
+  create_test_files "$test_file" || fail "error creating test files"
+
+  setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error setting up bucket"
+  if get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$test_file_folder/$test_file-copy" "$username" "$password"; then
+    fail "able to get object despite not being bucket owner"
+  fi
+  change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username" || fail "error changing bucket ownership"
+  put_object "s3api" "$test_file_folder/$test_file" "$BUCKET_ONE_NAME" "$test_file" || fail "failed to add object to bucket"
+  delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password" || fail "error deleting object"
+}
+
+@test "test_admin_put_get_object" {
+  username="ABCDEFG"
+  password="HIJKLMN"
+  test_file="test_file"
+
+  setup_user "$username" "$password" "admin" || fail "error creating user if nonexistent"
+  create_test_file_with_size "$test_file" 10 || fail "error creating test file"
+
+  setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error setting up bucket"
+  put_object_with_user "s3api" "$test_file_folder/$test_file" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password" || fail "failed to add object to bucket"
+  get_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$test_file_folder/$test_file-copy" "$username" "$password" || fail "error getting object"
+  compare_files "$test_file_folder/$test_file" "$test_file_folder/$test_file-copy" || fail "files don't match"
+  delete_object_with_user "s3api" "$BUCKET_ONE_NAME" "$test_file" "$username" "$password" || fail "error deleting object"
+  if get_object "s3api" "$BUCKET_ONE_NAME" "$test_file" "$test_file_folder/$test_file-copy"; then
+    fail "file not successfully deleted"
+  fi
+  # shellcheck disable=SC2154
+  [[ "$get_object_error" == *"NoSuchKey"* ]] || fail "unexpected error message: $get_object_error"
+  delete_bucket_or_contents "s3api" "$BUCKET_ONE_NAME"
+  delete_test_files "$test_file" "$test_file-copy"
+}
+
+@test "test_user_create_multipart_upload" {
+  username="ABCDEFG"
+  password="HIJKLMN"
+  test_file="test_file"
+
+  setup_user "$username" "$password" "user" || fail "error creating user if nonexistent"
+  create_large_file "$test_file" || fail "error creating test file"
+  setup_bucket "s3api" "$BUCKET_ONE_NAME" || fail "error setting up bucket"
+  change_bucket_owner "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY" "$BUCKET_ONE_NAME" "$username" || fail "error changing bucket ownership"
+  create_multipart_upload_with_user "$BUCKET_ONE_NAME" "dummy" "$username" "$password" || fail "unable to create multipart upload"
+}
diff --git a/tests/util.sh b/tests/util.sh
index 4b1c8a6e..4fa5cd31 100644
--- a/tests/util.sh
+++ b/tests/util.sh
@@ -10,6 +10,7 @@ source ./tests/commands/create_bucket.sh
 source ./tests/commands/delete_bucket.sh
 source ./tests/commands/delete_bucket_policy.sh
 source ./tests/commands/delete_object.sh
+source ./tests/commands/get_bucket_acl.sh
 source ./tests/commands/get_bucket_ownership_controls.sh
 source ./tests/commands/get_bucket_tagging.sh
 source ./tests/commands/get_object_tagging.sh
@@ -156,7 +157,7 @@ delete_bucket_or_contents() {
     log 2 "error deleting bucket contents"
     return 1
   fi
-  if ! delete_bucket_policy "s3api" "$2"; then
+  if ! delete_bucket_policy "$1" "$2"; then
     log 2 "error deleting bucket policies"
     return 1
   fi
@@ -165,6 +166,11 @@
     return 1
   fi
   # shellcheck disable=SC2154
+  #if [[ "$object_ownership_rule" != "BucketOwnerEnforced" ]]; then
+  #  get_bucket_acl "$1" "$2" || fail "error getting bucket acl"
+  #  log 5 "ACL: $acl"
+  #fi
+  log 5 "object ownership rule: $object_ownership_rule"
   if [[ "$object_ownership_rule" != "BucketOwnerEnforced" ]] && ! put_bucket_canned_acl "$2" "private"; then
     log 2 "error resetting bucket ACLs"
     return 1
   fi
@@ -192,8 +198,7 @@ delete_bucket_or_contents_if_exists() {
     return 1
   fi
   if [[ $bucket_exists_result -eq 0 ]]; then
-    delete_bucket_or_contents "$1" "$2" || local delete_result=$?
-    if [[ delete_result -ne 0 ]]; then
+    if ! delete_bucket_or_contents "$1" "$2"; then
       log 2 "error deleting bucket or contents"
       return 1
     fi
@@ -247,16 +252,16 @@ setup_bucket() {
 # return 0 for true, 1 for false, 2 for error
 object_exists() {
   if [ $# -ne 3 ]; then
-    echo "object exists check missing command, bucket name, object name"
+    log 2 "object exists check missing command, bucket name, object name"
     return 2
   fi
-  head_object "$1" "$2" "$3" || head_result=$?
-  if [[ $head_result -eq 2 ]]; then
-    echo "error checking if object exists"
+  head_object "$1" "$2" "$3" || local head_object_result=$?
+  if [[ $head_object_result -eq 2 ]]; then
+    log 2 "error checking if object exists"
     return 2
   fi
   # shellcheck disable=SC2086
-  return $head_result
+  return $head_object_result
 }

 put_object_with_metadata() {
@@ -536,23 +541,6 @@ check_bucket_tags_empty() {
   return $check_result
 }

-delete_bucket_tags() {
-  if [ $# -ne 2 ]; then
-    echo "delete bucket tag command missing command type, bucket name"
-    return 1
-  fi
-  local result
-  if [[ $1 == 'aws' ]]; then
-    tags=$(aws --no-verify-ssl s3api delete-bucket-tagging --bucket "$2" 2>&1) || result=$?
-  elif [[ $1 == 'mc' ]]; then
-    tags=$(mc --insecure tag remove "$MC_ALIAS"/"$2" 2>&1) || result=$?
-  else
-    echo "invalid command type $1"
-    return 1
-  fi
-  return 0
-}
-
 # add tags to object
 # params: object, key, value
 # return: 0 for success, 1 for error
diff --git a/tests/util_file.sh b/tests/util_file.sh
index 97ce316e..7d500ba5 100644
--- a/tests/util_file.sh
+++ b/tests/util_file.sh
@@ -15,14 +15,30 @@ create_test_files() {
     create_test_file_folder
   fi
   for name in "$@"; do
-    touch "$test_file_folder"/"$name" || local touch_result=$?
-    if [[ $touch_result -ne 0 ]]; then
-      echo "error creating file $name"
+    if [[ -e "$test_file_folder/$name" ]]; then
+      error=$(rm "$test_file_folder/$name" 2>&1) || fail "error removing existing test file: $error"
     fi
+    error=$(touch "$test_file_folder"/"$name" 2>&1) || fail "error creating new file: $error"
   done
   export test_file_folder
 }

+create_test_file_with_size() {
+  if [ $# -ne 2 ]; then
+    log 2 "'create test file with size' function requires name, size"
+    return 1
+  fi
+  if ! create_test_file_folder "$1"; then
+    log 2 "error creating test file folder"
+    return 1
+  fi
+  if ! error=$(dd if=/dev/urandom of="$test_file_folder"/"$1" bs=1 count="$2" 2>&1); then
+    log 2 "error writing file data: $error"
+    return 1
+  fi
+  return 0
+}
+
 create_test_folder() {
   if [ $# -lt 1 ]; then
     echo "create test folder command missing folder name"
@@ -110,9 +126,11 @@ create_test_file_folder() {
   else
     test_file_folder=$PWD/versity-gwtest
   fi
-  mkdir -p "$test_file_folder" || local mkdir_result=$?
-  if [[ $mkdir_result -ne 0 ]]; then
-    echo "error creating test file folder"
+  if ! error=$(mkdir -p "$test_file_folder" 2>&1); then
+    if [[ $error != *"File exists"* ]]; then
+      log 2 "error creating test file folder: $error"
+      return 1
+    fi
+  fi
   export test_file_folder
 }