From f6a3fe4c4210d3e5a6399abeded546999fb89727 Mon Sep 17 00:00:00 2001
From: Luke McCrone
Date: Fri, 26 Jan 2024 12:33:24 -0300
Subject: [PATCH] remove "direct" parameter

---
 .github/workflows/system.yml |  2 +-
 tests/.env.default           |  2 +-
 tests/.env.s3                |  6 -----
 tests/.env.versitygw         |  1 -
 tests/README.md              |  7 +++---
 tests/s3_bucket_tests.sh     | 48 ++++++------------------------------
 tests/tests.sh               | 11 ++-------
 7 files changed, 15 insertions(+), 62 deletions(-)
 delete mode 100644 tests/.env.s3

diff --git a/.github/workflows/system.yml b/.github/workflows/system.yml
index a4f0a70c8..db43c64a8 100644
--- a/.github/workflows/system.yml
+++ b/.github/workflows/system.yml
@@ -29,7 +29,7 @@ jobs:
           git clone https://github.com/bats-core/bats-core.git
           cd bats-core && ./install.sh $HOME
 
-      #- name: Install AWS
+      #- name: Install AWS (local only)
       #  if: ${{ env.GITHUB_ACTIONS_RUNNER }} == ''
       #  uses: chrislennon/action-aws-cli@v1.1
diff --git a/tests/.env.default b/tests/.env.default
index 77480e576..6adb2c671 100644
--- a/tests/.env.default
+++ b/tests/.env.default
@@ -2,4 +2,4 @@ AWS_REGION=us-west-2
 AWS_PROFILE=versity
 VERSITY_EXE=./versitygw
 BACKEND=posix
-DIRECT=0
\ No newline at end of file
+LOCAL_FOLDER=/tmp/gw
\ No newline at end of file
diff --git a/tests/.env.s3 b/tests/.env.s3
deleted file mode 100644
index 3573aa971..000000000
--- a/tests/.env.s3
+++ /dev/null
@@ -1,6 +0,0 @@
-AWS_REGION=us-west-2
-AWS_PROFILE=versity
-VERSITY_EXE=./versitygw
-BACKEND=posix
-DIRECT=1
-LOCAL_FOLDER=/tmp/gw
\ No newline at end of file
diff --git a/tests/.env.versitygw b/tests/.env.versitygw
index dd80b095a..132927b66 100644
--- a/tests/.env.versitygw
+++ b/tests/.env.versitygw
@@ -2,5 +2,4 @@ AWS_REGION=us-east-1
 AWS_PROFILE=versity
 VERSITY_EXE=./versitygw
 BACKEND=posix
-DIRECT=0
 LOCAL_FOLDER=/tmp/gw
\ No newline at end of file
diff --git a/tests/README.md b/tests/README.md
index 7141b3c7b..351b393b1 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -4,11 +4,10 @@ Instructions:
 1. Build the `versitygw` binary.
 2. Create a local AWS profile for connection to S3, and add the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` values above to the profile.
 3. Create an environment file (`.env`) similar to the ones in this folder, setting the `AWS_PROFILE` parameter to the name of the profile you created.
-4. Set the `DIRECT` parameter to `0` to communicate via versitygw, or `1` to communicate directly with S3.
-5. In the root repo folder, run with `VERSITYGW_TEST_ENV= tests/s3_bucket_tests.sh`.
-6. If running/testing the GitHub workflow, create a `.secrets` file, and set the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` parameters here to the values of your AWS S3 IAM account.
+4. In the root repo folder, run with `VERSITYGW_TEST_ENV= tests/s3_bucket_tests.sh`.
+5. If running/testing the GitHub workflow locally, create a `.secrets` file, and set the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` parameters here to the values of your AWS S3 IAM account.
 ```
 AWS_ACCESS_KEY_ID=
 AWS_SECRET_ACCESS_KEY=
 ```
-7. To run the workflow locally, install **act** and run with `act -W .github/workflows/system.yml`.
\ No newline at end of file
+6. To run the workflow locally, install **act** and run with `act -W .github/workflows/system.yml`.
\ No newline at end of file
diff --git a/tests/s3_bucket_tests.sh b/tests/s3_bucket_tests.sh
index 0472d00c5..52ca2be45 100755
--- a/tests/s3_bucket_tests.sh
+++ b/tests/s3_bucket_tests.sh
@@ -9,11 +9,7 @@ create_bucket() {
   fi
   local exit_code=0
   local error
-  if $direct; then
-    error=$(aws s3 mb s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  else
-    error=$(aws --endpoint-url http://127.0.0.1:7070 s3 mb s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  fi
+  error=$(aws --endpoint-url http://127.0.0.1:7070 s3 mb s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
   if [ $exit_code -ne 0 ]; then
     echo "error creating bucket: $error"
     return 1
@@ -28,11 +24,7 @@ delete_bucket() {
   fi
   local exit_code=0
   local error
-  if $direct; then
-    error=$(aws s3 rb s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code="$?"
-  else
-    error=$(aws --endpoint-url http://127.0.0.1:7070 s3 rb s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code="$?"
-  fi
+  error=$(aws --endpoint-url http://127.0.0.1:7070 s3 rb s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code="$?"
   if [ $exit_code -ne 0 ]; then
     if [[ "$error" == *"The specified bucket does not exist"* ]]; then
       return 0
@@ -52,11 +44,7 @@ bucket_exists() {
   fi
   local exit_code=0
   local error
-  if $direct; then
-    error=$(aws s3 ls s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code="$?"
-  else
-    error=$(aws --endpoint-url http://127.0.0.1:7070 s3 ls s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code="$?"
-  fi
+  error=$(aws --endpoint-url http://127.0.0.1:7070 s3 ls s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code="$?"
   if [ $exit_code -ne 0 ]; then
     if [[ "$error" == *"The specified bucket does not exist"* ]] || [[ "$error" == *"Access Denied"* ]]; then
       return 1
@@ -98,11 +86,7 @@ object_exists() {
   fi
   local exit_code=0
   local error
-  if $direct; then
-    error=$(aws s3 ls s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code="$?"
-  else
-    error=$(aws --endpoint-url http://127.0.0.1:7070 s3 ls s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code="$?"
-  fi
+  error=$(aws --endpoint-url http://127.0.0.1:7070 s3 ls s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code="$?"
   if [ $exit_code -ne 0 ]; then
     if [[ "$error" == "" ]]; then
       return 1
@@ -121,11 +105,7 @@ put_object() {
   fi
   local exit_code=0
   local error
-  if $direct; then
-    error=$(aws s3 cp "$1" s3://"$2" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  else
-    error=$(aws --endpoint-url http://127.0.0.1:7070 s3 cp "$1" s3://"$2" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  fi
+  error=$(aws --endpoint-url http://127.0.0.1:7070 s3 cp "$1" s3://"$2" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
   if [ $exit_code -ne 0 ]; then
     echo "error copying object to bucket: $error"
     return 1
@@ -160,11 +140,7 @@ delete_object() {
   fi
   local exit_code=0
   local error
-  if $direct; then
-    error=$(aws s3 rm s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  else
-    error=$(aws --endpoint-url http://127.0.0.1:7070 s3 rm s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  fi
+  error=$(aws --endpoint-url http://127.0.0.1:7070 s3 rm s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
   if [ $exit_code -ne 0 ]; then
     echo "error deleting object: $error"
     return 1
@@ -175,11 +151,7 @@
 list_buckets() {
   local exit_code=0
   local output
-  if $direct; then
-    output=$(aws s3 ls --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  else
-    output=$(aws --endpoint-url http://127.0.0.1:7070 s3 ls --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  fi
+  output=$(aws --endpoint-url http://127.0.0.1:7070 s3 ls --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
   if [ $exit_code -ne 0 ]; then
     echo "error listing buckets: $output"
     return 1
@@ -201,11 +173,7 @@ list_objects() {
   fi
   local exit_code=0
   local output
-  if $direct; then
-    output=$(aws s3 ls s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  else
-    output=$(aws --endpoint-url http://127.0.0.1:7070 s3 ls s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
-  fi
+  output=$(aws --endpoint-url http://127.0.0.1:7070 s3 ls s3://"$1" --region "$AWS_REGION" --profile "$AWS_PROFILE" 2>&1) || exit_code=$?
   if [ $exit_code -ne 0 ]; then
     echo "error listing objects: $output"
     return 1
diff --git a/tests/tests.sh b/tests/tests.sh
index 21d50dfcd..bfe73235d 100644
--- a/tests/tests.sh
+++ b/tests/tests.sh
@@ -40,16 +40,9 @@ setup() {
     echo "No local storage folder set"
     return 1
   fi
-  if [ -z "$DIRECT" ] || [ "$DIRECT" -eq 0 ]; then
-    direct=false
-    ROOT_ACCESS_KEY="$AWS_ACCESS_KEY_ID" ROOT_SECRET_KEY="$AWS_SECRET_ACCESS_KEY" "$VERSITY_EXE" "$BACKEND" "$LOCAL_FOLDER" &
-    versitygw_pid=$!
-  else
-    direct=true
-  fi
+  ROOT_ACCESS_KEY="$AWS_ACCESS_KEY_ID" ROOT_SECRET_KEY="$AWS_SECRET_ACCESS_KEY" "$VERSITY_EXE" "$BACKEND" "$LOCAL_FOLDER" &
+  versitygw_pid=$!
   export versitygw_pid
-  export direct
-  #echo "$VERSITYGW_TEST_ENV $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $VERSITY_EXE $BACKEND $DIRECT $AWS_REGION"
 }
 
 fail() {
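With `DIRECT` gone, the suite always talks to the gateway. As a reference, a minimal local run after this patch, assuming the `versitygw` binary is already built and `tests/.env.default` (changed above) is the chosen env file, would look like:

```
# Run from the repo root; VERSITYGW_TEST_ENV points the suite at an env file.
# tests/.env.default is used here only as an example.
VERSITYGW_TEST_ENV=tests/.env.default tests/s3_bucket_tests.sh
```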
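The `tests/tests.sh` hunk means `setup()` now unconditionally starts versitygw in the background. A standalone sketch of that flow, assuming `AWS_ACCESS_KEY_ID`/`AWS_SECRET_ACCESS_KEY` are already exported (for example via the `.secrets` file from the README) and that the gateway is stopped with a plain `kill` (teardown is not shown in this diff):

```
# Load the gateway settings used by the tests (example env file).
source tests/.env.default
# Start the gateway exactly as setup() now does, remembering its PID.
ROOT_ACCESS_KEY="$AWS_ACCESS_KEY_ID" ROOT_SECRET_KEY="$AWS_SECRET_ACCESS_KEY" \
  "$VERSITY_EXE" "$BACKEND" "$LOCAL_FOLDER" &
versitygw_pid=$!
# ... run aws --endpoint-url http://127.0.0.1:7070 commands against it ...
# Illustrative cleanup; the real teardown is outside this diff.
kill "$versitygw_pid"
```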
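One side effect of dropping the `if $direct` branches is that the endpoint `http://127.0.0.1:7070` is now repeated in every helper in `tests/s3_bucket_tests.sh`. A possible follow-up, sketched here with a hypothetical `send_command` wrapper and `GW_ENDPOINT` variable that are not part of this patch, would centralize it:

```
# Hypothetical wrapper, not in this patch: one place to change the endpoint.
GW_ENDPOINT="${GW_ENDPOINT:-http://127.0.0.1:7070}"

send_command() {
  # Every helper shares the same endpoint, region, and profile flags.
  aws --endpoint-url "$GW_ENDPOINT" "$@" --region "$AWS_REGION" --profile "$AWS_PROFILE"
}

# create_bucket, for example, could then become:
#   error=$(send_command s3 mb s3://"$1" 2>&1) || exit_code=$?
```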