Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
108 changes: 108 additions & 0 deletions .github/scripts/gcs/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
SHELL = bash
# Per-run scratch directory: GITHUB_RUN_ID in CI, $USER for local runs.
TMP_DIR := /tmp/storage-cli-gcs-$(or $(GITHUB_RUN_ID),$(USER))

# None of the targets below create a file with the target's own name in the
# working directory (the .lock files live in TMP_DIR), so declare them all
# phony.  (The previous `.PHONY: FORCE` lines declared an unused target
# named FORCE and had no effect.)
.PHONY: default create-tmp-dir regional.lock multiregional.lock public.lock \
	regional-bucket multiregional-bucket public-bucket prep-gcs clean-gcs \
	test-unit test-int test-fast-int help

default: test-int

# Ensure tmp directory exists
create-tmp-dir:
	@mkdir -p "$(TMP_DIR)"

# Generate a $StorageClass.lock which contains our bucket name
# used for testing. Buckets must be unique among all in GCS,
# we cannot simply hardcode a bucket.
regional.lock: create-tmp-dir
	@test -s "$(TMP_DIR)/regional.lock" || \
		echo "gcs-$$(openssl rand -hex 20)" > "$(TMP_DIR)/regional.lock"

# Create a bucket using the name located in $StorageClass.lock with
# a sane location.  Creation is skipped if `gsutil ls` already shows it.
regional-bucket: regional.lock
	@if ! gsutil ls | grep -q "$$(cat $(TMP_DIR)/regional.lock)"; then \
		gsutil mb -c REGIONAL -l us-east1 "gs://$$(cat $(TMP_DIR)/regional.lock)"; \
	fi

multiregional.lock: create-tmp-dir
	@test -s "$(TMP_DIR)/multiregional.lock" || \
		echo "gcs-$$(openssl rand -hex 20)" > "$(TMP_DIR)/multiregional.lock"

multiregional-bucket: multiregional.lock
	@if ! gsutil ls | grep -q "$$(cat $(TMP_DIR)/multiregional.lock)"; then \
		gsutil mb -c MULTI_REGIONAL -l us "gs://$$(cat $(TMP_DIR)/multiregional.lock)"; \
	fi

public.lock: create-tmp-dir
	@test -s "$(TMP_DIR)/public.lock" || \
		echo "gcs-$$(openssl rand -hex 20)" > "$(TMP_DIR)/public.lock"

# The public bucket additionally grants anonymous read access.  IAM changes
# propagate asynchronously, so poll an anonymous GET until it returns
# NoSuchKey (rather than an access error) before declaring the bucket ready.
public-bucket: public.lock
	@if ! gsutil ls | grep -q "$$(cat $(TMP_DIR)/public.lock)"; then \
		gsutil mb -c MULTI_REGIONAL -l us "gs://$$(cat $(TMP_DIR)/public.lock)" && \
		gsutil iam ch allUsers:legacyObjectReader "gs://$$(cat $(TMP_DIR)/public.lock)" && \
		gsutil iam ch allUsers:legacyBucketReader "gs://$$(cat $(TMP_DIR)/public.lock)" && \
		echo "waiting for IAM to propagate" && \
		until curl -s \
			"https://storage.googleapis.com/$$(cat $(TMP_DIR)/public.lock)/non-existent" \
			| grep -q "NoSuchKey"; do sleep 1; done; \
	fi

# Create all buckets necessary for the test.
prep-gcs: regional-bucket multiregional-bucket public-bucket

# Remove all buckets listed in $StorageClass.lock files.
# The first line aborts (loudly) when any lock file is missing, so we never
# run the deletions against a half-initialized environment.
clean-gcs:
	@test -s "$(TMP_DIR)/multiregional.lock" && test -s "$(TMP_DIR)/regional.lock" && test -s "$(TMP_DIR)/public.lock"
	@gsutil rm "gs://$$(cat $(TMP_DIR)/regional.lock)/*" || true
	@gsutil rb "gs://$$(cat $(TMP_DIR)/regional.lock)"
	@rm "$(TMP_DIR)/regional.lock"
	@gsutil rm "gs://$$(cat $(TMP_DIR)/multiregional.lock)/*" || true
	@gsutil rb "gs://$$(cat $(TMP_DIR)/multiregional.lock)"
	@rm "$(TMP_DIR)/multiregional.lock"
	@gsutil rm "gs://$$(cat $(TMP_DIR)/public.lock)/*" || true
	@gsutil rb "gs://$$(cat $(TMP_DIR)/public.lock)"
	@rm "$(TMP_DIR)/public.lock"
	@rmdir "$(TMP_DIR)" 2>/dev/null || true

# Perform only unit tests
test-unit:
	cd ../../.. && go run github.com/onsi/ginkgo/v2/ginkgo --skip-package=integration ./gcs/...

# Perform all tests, including integration tests.
# Bucket names come from the lock files written by prep-gcs.
test-int:
	export MULTIREGIONAL_BUCKET_NAME="$$(cat $(TMP_DIR)/multiregional.lock)" && \
	export REGIONAL_BUCKET_NAME="$$(cat $(TMP_DIR)/regional.lock)" && \
	export PUBLIC_BUCKET_NAME="$$(cat $(TMP_DIR)/public.lock)" && \
	cd ../../.. && go run github.com/onsi/ginkgo/v2/ginkgo gcs/integration/

# Perform all non-long tests, including integration tests.
test-fast-int:
	export MULTIREGIONAL_BUCKET_NAME="$$(cat $(TMP_DIR)/multiregional.lock)" && \
	export REGIONAL_BUCKET_NAME="$$(cat $(TMP_DIR)/regional.lock)" && \
	export PUBLIC_BUCKET_NAME="$$(cat $(TMP_DIR)/public.lock)" && \
	export SKIP_LONG_TESTS="yes" && \
	cd ../../.. && go run github.com/onsi/ginkgo/v2/ginkgo gcs/integration/

help:
	@echo " prep-gcs: create external GCS buckets needed for integration testing"
	@echo " clean-gcs: remove external GCS buckets"
	@echo " test-fast-int: run a reduced integration test suite (presubmit)"
	@echo " test-int: run the full integration test (CI only)"
	@echo " test-unit: run unit tests"
	@echo ""
	@echo "expected environment variables:"
	@echo " GOOGLE_SERVICE_ACCOUNT=contents of a JSON service account key"
28 changes: 28 additions & 0 deletions .github/scripts/gcs/run-int.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
#!/usr/bin/env bash
set -euo pipefail

# Run the GCS integration test suite.  Expects google_json_key_data (the
# contents of a GCP service-account JSON key) in the environment; buckets
# must already exist (see setup.sh).

# Directory containing this script, the Makefile, and utils.sh.
script_dir="$( cd "$(dirname "${0}")" && pwd )"

# Fail fast when the service-account JSON is not provided by the caller.
: "${google_json_key_data:?}"

# The Makefile's test targets read the key via this variable.
export GOOGLE_SERVICE_ACCOUNT="${google_json_key_data}"

# Single pushd for login and test run (the original pushd/popd/pushd
# round-trip re-entered the same directory for no reason).
pushd "${script_dir}" > /dev/null
source utils.sh
gcloud_login

# SKIP_LONG_TESTS=yes selects the reduced (presubmit) suite.
if [[ "${SKIP_LONG_TESTS:-}" == "yes" ]]; then
  make test-fast-int
else
  make test-int
fi
popd > /dev/null


16 changes: 16 additions & 0 deletions .github/scripts/gcs/setup.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
#!/usr/bin/env bash
set -euo pipefail

# Create the GCS buckets required by the integration tests.  Expects
# google_json_key_data (a GCP service-account JSON key) in the environment.

# Directory containing this script, the Makefile, and utils.sh.
# (The previous repo_root variable was computed but never used.)
script_dir="$( cd "$(dirname "${0}")" && pwd )"

# Fail fast when the service-account JSON is not provided by the caller.
: "${google_json_key_data:?}"

pushd "${script_dir}"
source utils.sh
gcloud_login
make prep-gcs
popd
16 changes: 16 additions & 0 deletions .github/scripts/gcs/teardown.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
#!/usr/bin/env bash
set -euo pipefail

# Delete the GCS buckets created by setup.sh.  Expects google_json_key_data
# (a GCP service-account JSON key) in the environment.

# Directory containing this script, the Makefile, and utils.sh.
# (The previous repo_root variable was computed but never used.)
script_dir="$( cd "$(dirname "${0}")" && pwd )"

# Fail fast when the service-account JSON is not provided by the caller.
: "${google_json_key_data:?}"

pushd "${script_dir}"
source utils.sh
gcloud_login
make clean-gcs
popd
72 changes: 72 additions & 0 deletions .github/scripts/gcs/utils.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
#!/usr/bin/env bash

# Verify that the environment variable named by $1 holds a usable value.
# Exits 1 when the variable is unset, empty, or still the 'replace-me'
# placeholder.  (The previous version only matched 'replace-me', so an
# unset variable slipped through; it also used eval for the indirection,
# which re-parses arbitrary variable contents.)
check_param() {
  local name=$1
  # ${!name} is bash indirect expansion — no eval needed; :- keeps set -u happy.
  local value="${!name:-}"
  if [ -z "${value}" ] || [ "${value}" == 'replace-me' ]; then
    echo "environment variable ${name} must be set"
    exit 1
  fi
}

# Dump the latest commit and working-tree status, for CI log context.
print_git_state() {
  echo "--> last commit..."
  env TERM=xterm-256color git log -1
  echo "---"
  echo "--> local changes (e.g., from 'fly execute')..."
  env TERM=xterm-256color git status --verbose
  echo "---"
}

# Queue of cleanup commands (stored as strings) executed when the script
# exits; populated via add_on_exit below.
declare -a on_exit_items
on_exit_items=()

# EXIT-trap handler: run every registered cleanup command in registration
# order.  Each command is attempted up to 10 times with a linearly growing
# sleep (0..9 seconds) to ride out transient failures.
function on_exit {
  echo "Running ${#on_exit_items[@]} on_exit items..."
  for i in "${on_exit_items[@]}"
  do
    for try in $(seq 0 9); do
      sleep "${try}"
      echo "Running cleanup command $i (try: ${try})"
      # eval re-parses the stored string so commands registered with
      # arguments work; on failure retry, on success stop retrying.
      eval "${i}" || continue
      break
    done
  done
}

# Register "$*" as a cleanup command.  The EXIT trap is installed only on
# the first registration, so merely sourcing this file leaves trap state
# untouched.
function add_on_exit {
  local n=${#on_exit_items[@]}
  on_exit_items=("${on_exit_items[@]}" "$*")
  if [[ $n -eq 0 ]]; then
    trap on_exit EXIT
  fi
}

# Delete the GCS test buckets by invoking the Makefile's clean-gcs target.
# NOTE(review): relies on ${release_dir} being exported beforehand (set_env
# does this) and on a Makefile existing there — confirm callers, since the
# current scripts run `make clean-gcs` from the script directory instead.
# `|| return` keeps a pushd/popd failure from aborting the caller under set -e.
function clean_gcs {
  pushd "${release_dir}" || return
  make clean-gcs
  popd || return
}

# Export release_dir/workspace_dir derived from the running script's
# location, and put the Go tool-install directory on PATH so go-installed
# binaries (e.g. ginkgo) are runnable.
# NOTE(review): with this repo layout (.github/scripts/gcs/*.sh) the
# `cd ../..` lands in .github/, not the repo root — confirm this is what
# consumers of release_dir expect.
function set_env {
  # Temporaries are now local so they don't leak into the caller's scope.
  local my_dir
  my_dir=$(dirname "$(readlink -f "${0}")")

  export release_dir
  release_dir="$( cd "${my_dir}" && cd ../.. && pwd )"
  export workspace_dir
  workspace_dir="$( cd "${release_dir}" && cd .. && pwd )"

  local go_bin
  go_bin=$(go env GOPATH)
  # Quoted so a GOPATH containing spaces cannot mangle PATH.
  export PATH="${go_bin}/bin:${PATH}"
}


# Authenticate gcloud with the service account whose JSON key is in
# ${google_json_key_data}, and select that key's project_id as the active
# gcloud project.
function gcloud_login {
  check_param 'google_json_key_data'

  # Quote the JSON — the previous unquoted echo word-split/glob-expanded
  # the key contents before they reached jq.
  local google_project
  google_project=$(echo "${google_json_key_data}" | jq -r .project_id)

  local keyfile
  keyfile=$(mktemp)
  echo "${google_json_key_data}" > "${keyfile}"
  gcloud auth activate-service-account --key-file="${keyfile}"
  # Do not leave service-account credentials behind in the temp directory.
  rm -f "${keyfile}"
  gcloud config set project "${google_project}"
}
70 changes: 70 additions & 0 deletions .github/workflows/gcs-integration.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
name: GCS Integration Tests

# Run only when the GCS code or this workflow changes.
on:
  pull_request:
    paths:
      - ".github/workflows/gcs-integration.yml"
      - "gcs/**"

# NOTE(review): both jobs trigger on every matching PR; the fast suite is
# presumably a subset of the full one — confirm whether running both per PR
# is intended.
jobs:
  gcs-integration-fast-tests:
    name: GCS Integration Fast Tests
    runs-on: ubuntu-latest
    # The 'gcs-integration' environment gates access to the GCP secret.
    environment: gcs-integration
    steps:
      - name: Checkout code
        uses: actions/checkout@v5
      - name: Set up Go
        uses: actions/setup-go@v6
        with:
          go-version-file: go.mod
      - name: Install Ginkgo
        run: go install github.com/onsi/ginkgo/v2/ginkgo@latest
      - name: Setup GCS Test Environment
        run: |
          echo "${{ secrets.GCP_SERVICE_ACCOUNT_BASE64 }}" | base64 -d > /tmp/gcp-key.json
          export google_json_key_data="$(cat /tmp/gcp-key.json)"
          ./.github/scripts/gcs/setup.sh
      - name: Run Fast Tests
        run: |
          echo "${{ secrets.GCP_SERVICE_ACCOUNT_BASE64 }}" | base64 -d > /tmp/gcp-key.json
          export google_json_key_data="$(cat /tmp/gcp-key.json)"
          export SKIP_LONG_TESTS=yes
          ./.github/scripts/gcs/run-int.sh
      # Always tear down, even on failure, so randomly named buckets don't leak.
      - name: Teardown GCS Test Environment
        if: always()
        run: |
          echo "${{ secrets.GCP_SERVICE_ACCOUNT_BASE64 }}" | base64 -d > /tmp/gcp-key.json
          export google_json_key_data="$(cat /tmp/gcp-key.json)"
          ./.github/scripts/gcs/teardown.sh

  gcs-integration-all-tests:
    name: GCS Integration All Tests
    runs-on: ubuntu-latest
    environment: gcs-integration
    steps:
      - name: Checkout code
        uses: actions/checkout@v5
      - name: Set up Go
        uses: actions/setup-go@v6
        with:
          go-version-file: go.mod
      - name: Install Ginkgo
        run: go install github.com/onsi/ginkgo/v2/ginkgo@latest
      - name: Setup GCS Test Environment
        run: |
          echo "${{ secrets.GCP_SERVICE_ACCOUNT_BASE64 }}" | base64 -d > /tmp/gcp-key.json
          export google_json_key_data="$(cat /tmp/gcp-key.json)"
          ./.github/scripts/gcs/setup.sh
      - name: Run All Tests
        run: |
          echo "${{ secrets.GCP_SERVICE_ACCOUNT_BASE64 }}" | base64 -d > /tmp/gcp-key.json
          export google_json_key_data="$(cat /tmp/gcp-key.json)"
          ./.github/scripts/gcs/run-int.sh
      # Always tear down, even on failure, so randomly named buckets don't leak.
      - name: Teardown GCS Test Environment
        if: always()
        run: |
          echo "${{ secrets.GCP_SERVICE_ACCOUNT_BASE64 }}" | base64 -d > /tmp/gcp-key.json
          export google_json_key_data="$(cat /tmp/gcp-key.json)"
          ./.github/scripts/gcs/teardown.sh

51 changes: 13 additions & 38 deletions gcs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,41 +48,16 @@ The command line tool expects a JSON configuration file. Run `storage-cli-gcs --
will be used if they exist (either through `gcloud auth application-default login` or a [service account](https://cloud.google.com/iam/docs/understanding-service-accounts)).
If they don't exist the client will fall back to `none` behavior.

## Running Integration Tests

1. Ensure [gcloud](https://cloud.google.com/sdk/downloads) is installed and you have authenticated (`gcloud auth login`).
These credentials will be used by the Makefile to create/destroy Google Cloud Storage buckets for testing.
1. Set the Google Cloud project: `gcloud config set project <your project>`
1. Generate a service account with the `Storage Admin` role for your project and set the contents as
the environment variable `GOOGLE_APPLICATION_CREDENTIALS`, for example:
```bash
export project_id=$(gcloud config get-value project)

export service_account_name=storage-cli-gcs-integration-tests
export service_account_email=${service_account_name}@${project_id}.iam.gserviceaccount.com
credentials_file=$(mktemp)

gcloud config set project ${project_id}
gcloud iam service-accounts create ${service_account_name} --display-name "Integration Test Access for storage-cli-gcs "
gcloud iam service-accounts keys create ${credentials_file} --iam-account ${service_account_email}
gcloud project add-iam-policy-binding ${project_id} --member serviceAccount:${service_account_email} --role roles/storage.admin

export GOOGLE_SERVICE_ACCOUNT="$(cat ${credentials_file})"
export GOOGLE_APPLICATION_CREDENTIALS="$(cat ${credentials_file})"
export LC_ALL=C # fix `tr` complaining about "illegal byte sequence" on OSX
```
1. Run the unit and fast integration tests: `make test-fast-int`
1. Clean up buckets: `make clean-gcs`

## Development

* A Makefile is provided that automates integration testing. Try `make help` to get started.
* [gvt](https://godoc.org/github.com/FiloSottile/gvt) is used for vendoring.

## Contributing

For details on how to contribute to this project - including filing bug reports and contributing code changes - please see [CONTRIBUTING.md](./CONTRIBUTING.md).

## License

This tool is licensed under Apache 2.0. Full license text is available in [LICENSE](LICENSE).
## Running Tests

### Unit Tests

1. Run the unit tests with `make -C .github/scripts/gcs test-unit`.

### Integration Tests

1. Create a service account with the `Storage Admin` role.
1. Create a new key for the service account and download the credentials as a JSON file.
1. Export the JSON content with `export google_json_key_data="$(cat <path-to-json-file.json>)"`.
1. Optionally, run `export SKIP_LONG_TESTS=yes` to execute only the fast-running tests.
1. Navigate to the project's root folder.
1. Run the environment setup script to create the buckets: `./.github/scripts/gcs/setup.sh`.
1. Run the tests: `./.github/scripts/gcs/run-int.sh`.
1. Run the environment teardown script to delete the resources: `./.github/scripts/gcs/teardown.sh`.
Loading