diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f503b36..865712d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,4 +5,4 @@ # the repo. Unless a later match takes precedence, # @global-owner1 and @global-owner2 will be requested for # review when someone opens a pull request. -* @jeff-schnitter @rich-jay +* @jeff-schnitter diff --git a/.github/workflows/test-pr.yml b/.github/workflows/test-pr.yml index 68511dc..536f933 100644 --- a/.github/workflows/test-pr.yml +++ b/.github/workflows/test-pr.yml @@ -12,12 +12,8 @@ on: env: AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }} CORTEX_API_KEY: ${{ secrets.CORTEX_API_KEY }} - CORTEX_API_KEY_RICH_SANDBOX: ${{ secrets.CORTEX_API_KEY_RICH_SANDBOX }} CORTEX_API_KEY_VIEWER: ${{ secrets.CORTEX_API_KEY_VIEWER }} CORTEX_BASE_URL: ${{ vars.CORTEX_BASE_URL }} - GH_PAT: ${{ secrets.GH_PAT }} - GH_WEBHOOK_SECRET: ${{ secrets.GH_WEBHOOK_SECRET }} - CORTEX_GH_WEBHOOK_URL: ${{ vars.CORTEX_GH_WEBHOOK_URL }} jobs: test: @@ -41,6 +37,7 @@ jobs: - name: Install dependencies run: | + sudo apt update && sudo apt install just python -m pip install --upgrade pip pip install poetry poetry-audit-plugin pytest-cov pytest pytest-xdist @@ -53,11 +50,6 @@ jobs: echo "[default]" > $HOME/.cortex/config echo "api_key = $CORTEX_API_KEY" >> $HOME/.cortex/config echo "base_url = $CORTEX_BASE_URL" >> $HOME/.cortex/config - echo "[rich-sandbox]" >> $HOME/.cortex/config - echo "api_key = $CORTEX_API_KEY_RICH_SANDBOX" >> $HOME/.cortex/config - pwd - ls -l $HOME/.cortex - cat $HOME/.cortex/config shell: bash - name: Install package @@ -67,4 +59,4 @@ jobs: - name: Test with pytest run: | - make all-cli + just test-all diff --git a/.gitignore b/.gitignore index b29761b..2d5197c 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,7 @@ coverage.json .github/workflows/test.yml .export report.html +import.html +report*.html +.load-data-done + diff --git a/HISTORY.md b/HISTORY.md index 503ce51..a6e5d1b 100644 --- a/HISTORY.md +++ b/HISTORY.md 
@@ -1,6 +1,26 @@ Release History =============== +1.0.0 (2025-06-13) +------------------ + +**Improvements** +- A complete re-write centered around the [typer library](https://typer.tiangolo.com/) and better code modularization +- Added sub-commands: + - api-keys + - custom-metrics + - initiatives + - workflows +- Removed sub-commands: + - team hieararchies +- added `--table` and `--csv` options to list commands + +**Breaking Changes** +- custom-events -i timestamp changed to -ts timestamp +- plugins `get` command changed to `list` +- plugins `get-by-tag` subcommand changed to `get` +- `resource-definitions` command changed to `entity-types` + 0.27.0 (2025-01-04) ------------------ diff --git a/Justfile b/Justfile new file mode 100644 index 0000000..5c78318 --- /dev/null +++ b/Justfile @@ -0,0 +1,35 @@ +cortex_cli := 'poetry run cortex' +pytest := 'PYTHONPATH=. poetry run pytest -rA' + +export CORTEX_API_KEY := env('CORTEX_API_KEY') +export CORTEX_BASE_URL := env('CORTEX_BASE_URL', "https://api.getcortexapp.com") +export CORTEX_API_KEY_VIEWER := env('CORTEX_API_KEY_VIEWER') + +help: + @just -l + +_setup: + @if [ -f .coverage ]; then rm .coverage; fi + +# Run all tests +test-all: _setup test-parallel test-serial + +# Run tests that can run in parallel +test-parallel: test-import + {{pytest}} -n auto -m "not setup and not serial" --html=report-parallel.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests + +# Run all tests serially - helpful to see if any tests seem to be hanging +_test-all-individual: test-import + {{pytest}} --html=report-all-invidual.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests + +# Run tests that have to run sequentially +test-serial: test-import + {{pytest}} -n auto -m "serial" --html=report-serial.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests + +# Run import test, a pre-requisite for any tests that rely on test 
data. +test-import: + {{pytest}} tests/test_import.py --cov=cortexapps_cli --cov-report= + +# Run a single test, ie: just test tests/test_catalog.py +test testname: + {{pytest}} {{testname}} diff --git a/Makefile b/Makefile deleted file mode 100644 index 63aee46..0000000 --- a/Makefile +++ /dev/null @@ -1,268 +0,0 @@ -# -# Environment Variables -# -UNAME_S := $(shell uname -s) - -PYTHON_VENV = ~/.venv/cortex-cli-test - -ifeq ($(CORTEX_CLI),) ## Cortex CLI, defaults to CLI in the repository -export CORTEX_CLI := . $(PYTHON_VENV)/bin/activate; python3 ./cortexapps_cli/cortex.py -q -endif - -ifeq ($(CORTEX_GH_ALIAS),) ## Github alias defined in Cortex GitHub integration, defaults to public-api-test -export CORTEX_GH_ALIAS := public-api-test -endif - -# Change this once we can get WEBHOOK_URL via Cortex API -ifeq ($(CORTEX_GH_WEBHOOK_URL),) ## The GitHub webhook URL defined in Cortex -export CORTEX_GH_WEBHOOK_URL=https://api.getcortexapp.com/api/v1/github/manual-webhook/e0b77380-e7af-4e14-8563-8168651e307e/$(CORTEX_GH_ALIAS) -endif - -# Should only need to change this if using enterprise GitHub. 
-ifeq ($(GH_URL),) ## GitHub URL, will be used to call the GitHub API to create a webhook -export GH_URL=https://api.github.com -endif - -ifeq ($(GH_ORG),) ## GitHub organization used for GitHub tests -export GH_ORG=cortextests -endif - -ifeq ($(GH_REPO),) ## GitHub repository used for GitHub tests -export GH_REPO=public-api-test-repo -endif - -ifeq ($(CORTEX_API_KEY),) ## Required; Cortex API key with Admin permission - $(error CORTEX_API_KEY is not set) -endif - -ifeq ($(CORTEX_BASE_URL),) ## Required; Cortex base URL for API, ie for cloud this would be https://api.getcortexapp.com - $(error CORTEX_BASE_URL is not set) -endif -ifeq ($(CORTEX_BASE_URL),http://api.local.getcortexapp.com:8080) -export CORTEX_GH_WEBHOOK_URL=$(shell ./scripts/ngrok.sh)/api/v1/github/manual-webhook/a4037bca-c83e-4058-8550-8393826ff642/$(CORTEX_GH_ALIAS) - ifeq ($(NGROK_PORT),) - export NGROK_PORT=8081 - endif -endif - -ifeq ($(CORTEX_ENV),) ## Cortex environment, defaults to 'default'; used to distinguish make build targets between environments; if not set inferred from CORTEX_BASE_URL - ifeq ($(CORTEX_BASE_URL),http://api.local.getcortexapp.com:8080) - export CORTEX_ENV=local - else ifeq ($(CORTEX_BASE_URL),https://api.staging.getcortexapp.com) - export CORTEX_ENV=staging - else ifeq ($(CORTEX_BASE_URL),https://api.getcortexapp.com) - export CORTEX_ENV=prod - else ifeq ($(CORTEX_BASE_URL),http://api.helm.getcortexapp.com) - export CORTEX_ENV=helm - else ifeq ($(CORTEX_ENV),) - export CORTEX_ENV=default - endif -endif - -ifneq ($(CORTEX_TENANT),) ## Used with CORTEX_ENV, if set can help distinguish between different tenants in the same environment - export BUILD_SUBDIR=$(CORTEX_ENV)-$(CORTEX_TENANT) -else - export BUILD_SUBDIR=$(CORTEX_ENV) -endif - -# -# Configuration variables -# -BUILD_DIR = build/$(BUILD_SUBDIR) -BUILD_TOOLS_DIR = $(BUILD_DIR)/tools -export FEATURE_FLAG_EXPORT=$(BUILD_DIR)/ff/feature-flags.json -DATA_DIR = data -ENTITIES := $(shell find $(DATA_DIR) -type f) - 
-ARCHIVE_ENTITIES = robot-item-sorter inventory-scraper -ARCHIVE_TARGETS := $(ARCHIVE_ENTITIES:%=$(BUILD_DIR)/%.archive) - -CATALOG_ENTITIES := $(wildcard data/catalog/*.yaml) -CATALOG_TARGETS := $(CATALOG_ENTITIES:data/catalog/%.yaml=$(BUILD_DIR)/%.yaml) - -CUSTOM_RESOURCES := $(wildcard data/resource-definitions/*.json) -CUSTOM_RESOURCE_TARGETS := $(CUSTOM_RESOURCES:data/resource-definitions/%.json=$(BUILD_DIR)/%.json) - -FEATURE_FLAG_VARS := $(shell env | grep CORTEX_FF | cut -d= -f1) -FEATURE_FLAGS = $(patsubst CORTEX_FF_%,%,$(FEATURE_FLAG_VARS)) -FEATURE_FLAG_ENVSUBST := $(FEATURE_FLAGS:%=$(BUILD_DIR)/ff/envsubst/%) - -all: info setup feature-flags-dump load-data github test-api ## Setup environment, load data and test -all-cli: all test-cli - -.PHONY: info -info: - @echo "Running test for: $(BUILD_SUBDIR)" - -.PHONY: setup -setup: tools venv ## Setup python virtual environment for testing - -# -# -# Tools setup -# -# -.PHONY: tools -tools: brew jq python3 - -.PHONY: brew -brew: $(BUILD_TOOLS_DIR)/brew | $(BUILD_TOOLS_DIR) - -$(BUILD_TOOLS_DIR)/brew: | $(BUILD_TOOLS_DIR) -ifeq ($(UNAME_S),Darwin) - @which brew > /dev/null || /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" -endif - @touch $@ - -.PHONY: jq -jq: $(BUILD_TOOLS_DIR)/jq | $(BUILD_TOOLS_DIR) - -$(BUILD_TOOLS_DIR)/jq: -ifeq ($(UNAME_S),Darwin) - @which jq > /dev/null || brew install jq -else - @which jq > /dev/null || (echo "jq is not installed"; exit) -endif - @touch $@ - -.PHONY: python3 -python3: ${BUILD_TOOLS_DIR}/python3 | $(BUILD_TOOLS_DIR) - -${BUILD_TOOLS_DIR}/python3: -ifeq ($(UNAME_S),Darwin) - @which python3 > /dev/null || brew install python3 -else - @which python3 > /dev/null || (echo "python3 is not installed"; exit 1) -endif - @touch $@ - -.PHONY: venv -venv: $(PYTHON_VENV) - -$(PYTHON_VENV): requirements.txt - python3 -m venv $@ - . 
$@/bin/activate; python3 -m pip install --upgrade -r $^ - touch $@ - -.PHONY: help -help: - @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | cut -d':' -f1- | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' - -.PHONY: vars -vars: ## Display variables used for testing - @grep -E 'ifeq.*## .*$$' $(MAKEFILE_LIST) | grep -v grep | sort | sed 's/ifeq.*(//' | sed 's/).*)//' | awk 'BEGIN {FS = "## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' - -.PHONY: load-data -load-data: catalog-entities archive-entities resource-definitions ## Load data from 'data' directory into Cortex - -.PHONY: archive-entities -archive-entities: $(ARCHIVE_TARGETS) | $(BUILD_DIR) - -.PHONY: catalog-entities -catalog-entities: $(CATALOG_TARGETS) | $(BUILD_DIR) - -$(BUILD_DIR)/%.archive: $(BUILD_TOOLS_DIR)/python3 - @$(CORTEX_CLI) catalog archive -t $(notdir $(basename $@)) - @touch $@ - -$(BUILD_DIR)/%.yaml: data/catalog/%.yaml $(CUSTOM_RESOURCE_TARGETS) - $(CORTEX_CLI) catalog create -f $< - @touch $@ - -.PHONY: resource-definitions -resource-definitions: $(CUSTOM_RESOURCE_TARGETS) | $(BUILD_DIR) - -$(BUILD_DIR)/%.json: data/resource-definitions/%.json | $(BUILD_DIR) - $(CORTEX_CLI) catalog delete-by-type -t $(notdir $(basename $@)) - ($(CORTEX_CLI) resource-definitions get -t $(notdir $(basename $@)) && $(CORTEX_CLI) resource-definitions delete -t $(notdir $(basename $@)) ) || : - $(CORTEX_CLI) resource-definitions create -f $< - @touch $@ - -# -# This target performs token replacement of files in the feature-flags directory and checks -# if the contents of the file have changed since the last time it was built. If so, the -# feature flag is updated in the environment. -# -# This check is beneficial only in local test environments. As of now, no intent to save -# state between runs of an automated build, so all flags would need to be set each test -# cycle. 
-# -# If these flags can be set all at once and time isn't a concern, this target can most -# likely be removed. -# -.PHONY: feature-flags -feature-flags: feature-flags-dump $(FEATURE_FLAG_ENVSUBST) - -$(BUILD_DIR)/ff/envsubst/%: | $(BUILD_DIR)/ff/envsubst $(BUILD_DIR)/ff/source - @echo "Checking if feature flag $* needs to be updated" - @envsubst < feature-flags/$*.json > $@ - @diff $@ $(BUILD_DIR)/ff/source/$* 2> /dev/null || (. $(PYTHON_VENV)/bin/activate; python tests/feature_flag_set.py $*) - @cp $@ $(BUILD_DIR)/ff/source - @rm $@ - -test: test-api test-cli ## Run pytest for both API and CLI tests in the 'tests' directory - -test-api: feature-flags ## Run pytest for API tests in the 'tests' directory - @if [ -f .coverage ]; then rm .coverage; fi -ifeq ($(CORTEX_API_KEY_VIEWER),) ## Required; Cortex API key with Viewer permission, used in RBAC tests - $(error CORTEX_API_KEY_VIEWER is not set) -endif - -ifeq ($(GH_PAT),) ## GitHub Personal Access Token - $(error GH_PAT is not set) -endif - -ifeq ($(GH_WEBHOOK_SECRET),) ## GitHub webhook secret; defined in the Cortex GitHub configuration and used to create GitHub webhook - $(error GH_WEBHOOK_SECRET is not set) -endif - - @. $(PYTHON_VENV)/bin/activate; PYTHONPATH=cortexapps_cli:tests pytest -rA -n auto -m "not serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing $(PYTEST_PARMS) - -test-cli: feature-flags test-api cli-tests ## Run pytest for CLI-specific tests in the 'tests' directory - -cli-tests: ## Run pytest for CLI-specific tests in the 'tests' directory - @. $(PYTHON_VENV)/bin/activate; PYTHONPATH=cortexapps_cli:tests pytest -rA -n 0 -m "serial" --cov=cortexapps_cli --cov-append --cov-report term-missing $(PYTEST_PARMS) - -test-git: feature-flags github ## Run pytest for git tests in the 'tests' directory - @. 
$(PYTHON_VENV)/bin/activate; PYTHONPATH=cortexapps_cli:tests pytest -k test_git - -.PHONY: clean -clean: clean-data - @rm -rf $(BUILD_DIR) - -clean-data: $(BUILD_TOOLS_DIR)/jq ${ENTITIES} - for entity in $(shell $(CORTEX_CLI) catalog list -g public-api-test | jq -r '.entities[].tag'); do \ - $(CORTEX_CLI) catalog delete -t $$entity; echo "Deleted: $$entity";\ - done - -.PHONY: feature-flags-dump -feature-flags-dump: $(FEATURE_FLAG_EXPORT) ## Dump current feature flags to $(FEATURE_FLAG_EXPORT) - -.PHONY: feature-flags-clean -feature-flags-clean: - @rm -f $(FEATURE_FLAG_EXPORT) - -$(FEATURE_FLAG_EXPORT): | $(BUILD_DIR)/ff - . $(PYTHON_VENV)/bin/activate; python3 tests/feature_flag_dump.py $@ - -.PHONY: github -github: $(BUILD_DIR)/github ## Configure Cortex GitHub integration, create GitHub webhook - -$(BUILD_DIR)/github: | $(BUILD_DIR) - . $(PYTHON_VENV)/bin/activate; PYTHONPATH=cortexapps_cli:tests python3 tests/github_setup.py - touch $@ - -$(BUILD_DIR): - @mkdir -p $@ - -$(BUILD_DIR)/ff: - @mkdir -p $@ - -$(BUILD_DIR)/ff/source: - @mkdir -p $@ - -$(BUILD_TOOLS_DIR): - @mkdir -p $@ - -$(BUILD_DIR)/ff/envsubst: - @mkdir -p $@ diff --git a/README.rst b/README.rst index 298df13..51b6154 100644 --- a/README.rst +++ b/README.rst @@ -58,7 +58,7 @@ Config file ---------------------- The CLI requires an API key for all operations. This key is stored in a config file whose default location is `~/.cortex/config`. -This path can be overridden with the `-c` flag. +This path can be overridden with the `-c` flag. You will be prompted to create the file if it does not exist. Minimal contents of the file: @@ -111,74 +111,10 @@ Example: Commands ---------------------- -Run :code:`cortex -h` to see a list of all commands: - -.. 
code-block: - - usage: cortex CLI [-h] [-a] [-c CONFIG] [-d] [-n] [-t] [-v] - {audit-logs,backup,catalog,custom-data,custom-events,dependencies,deploys,discovery-audit,docs,groups,integrations,ip-allowlist,on-call,packages,plugins,queries,resource-definitions,scorecards,teams-hierarchies,teams} - ... - - Cortex command line interface - - positional arguments: - {audit-logs,backup,catalog,custom-data,custom-events,dependencies,deploys,discovery-audit,docs,groups,integrations,ip-allowlist,on-call,packages,plugins,queries,resource-definitions,scorecards,teams-hierarchies,teams} - sub-command help - audit-logs audit log commands - backup import/export commands - catalog catalog commands - custom-data custom_data actions - custom-events custom events actions - dependencies dependencies commands - deploys deploys commands - discovery-audit Discovery Audit commands - docs OpenAPI doc commands - groups groups commands - integrations integrations sub-commands - ip-allowlist IP Allowlist information - on-call get on-call information - packages commands to create and modify packages - plugins commands to create and access plugins - queries run CQL queries - resource-definitions - resource definitions - scorecards scorecards API requests - teams-hierarchies commands to create and modify team hierarchies - teams commands to create and modify teams - - options: - -h, --help show this help message and exit - -a , --cliAlias get CLI parms from [TENANT.aliases] in config file - -c CONFIG, --config CONFIG - Config location, default = ~/.cortex/config - -d, --debug Writes request debug information as JSON to stderr - -n, --noObfuscate Do not obfuscate bearer token when debugging - -t , --tenant tenant name defined in ~/.cortex/config, defaults to 'default' - -v, --version show program's version number and exit - - Type 'man cortex' for additional details. - +Run :code:`cortex` to see a list of options and sub-commands. 
Run :code:`cortex -h` to see a list of all commands for each subcommand. -For example: - -.. code:: bash - - cortex audit-logs -h - -.. code-block:: - - usage: cortex CLI audit-logs [-h] {get} ... - - positional arguments: - {get} audit logs help - get retrieve audit logs - - options: - -h, --help show this help message and exit - - =================== Examples =================== @@ -211,21 +147,23 @@ Your cortex config file will require api keys for both tenants. It would look l .. code-block:: - Getting resource definitions - --> my-resource-1 - Getting catalog entities + Getting catalog --> my-domain-1 --> my-service-1 --> my-service-2 - Getting IP Allowlist definitions + Getting entity-types + --> my-entity-type-1 + Getting ip-allowlist + --> ip-allowlist + Getting plugins + --> my-plugin-1 Getting scorecards --> my-scorecard-1 - Getting teams - --> my-team-1 - --> my-team-2 + Getting workflows + --> my-workflow-1 Export complete! - Contents available in /Users/myUser/.cortex/export/2023-11-19-14-58-14 + Contents available in /Users/myUser/.cortex/export/2025-06-12-14-58-14 **Import** @@ -239,139 +177,23 @@ are automatically imported by Cortex. Cortex does not have access to any keys, integration configurations. ---------------------------------------------------------- -Export all services from one tenant; import into another ---------------------------------------------------------- - -This example shows how to export services from a tenant named :code:`myTenant-dev` and import those services into a tenant -named :code:`myTenant`. It is similar to the full export example "`Export from one tenant; import into another`_", but only -exports/imports services. - -Your cortex config file will require api keys for both tenants. It would look like this: - -.. 
code-block:: - - [myTenant] - api_key = - - [myTenant-dev] - api_key = - - -**Option 1: export service YAMLs to a directory and then import them** - -This option is helpful in case you want to save the entity YAML files. It makes it easy to restart or retry an import -because you will have all YAMLs saved on disk. - -**Export** - -.. code:: bash - - mkdir -p /tmp/cortex-export - cd /tmp/cortex-export - for service in `cortex -t myTenant catalog list -t service | jq -r ".entities[].tag" | sort` - do - cortex -t myTenant catalog descriptor -y -t ${service} > ${service}.yaml - done - -**Import** - -.. code:: bash - - cd /tmp/cortex-export - for file in `ls -1 *.yaml` - do - cortex -t myTenant-dev catalog create -f ${file} - done - -**Option 2: combine the export and import in a single command** - -This option is simpler and doesn't require any disk operations. However, if it fails for any reason you have to run the -entire export/import in its entirety. - -.. code:: bash - - for service in `cortex -t myTenant catalog list -t service | jq -r ".entities[].tag" | sort` - do - echo "Processing service: ${service}" - cortex -t myTenant catalog descriptor -y -t ${service} | cortex -t myTenant-dev catalog create -f- - done - ---------------------------------------------------------- -Export all domains from one tenant; import into another ---------------------------------------------------------- - -This example shows how to export domains from a tenant named :code:`myTenant-dev` and import those domains into a tenant -named :code:`myTenant`. It is similar to the full export example "`Export from one tenant; import into another`_", but only -exports/imports domains. - -Your cortex config file will require api keys for both tenants. It would look like this: - -.. 
code-block:: - - [myTenant] - api_key = - - [myTenant-dev] - api_key = - - -**Option 1: export domain YAMLs to a directory and then import them** - -This option is helpful in case you want to save the entity YAML files. It makes it easy to restart or retry an import -because you will have all YAMLs saved on disk. - -**Export** - -.. code:: bash - - mkdir -p /tmp/cortex-export - cd /tmp/cortex-export - for domain in `cortex -t myTenant catalog list -t domain | jq -r ".entities[].tag" | sort` - do - echo "creating ${domain}.yaml" - cortex -t myTenant catalog descriptor -y -t ${domain} > ${domain}.yaml - done - -**Import** - -.. code:: bash - - cd /tmp/cortex-export - for file in `ls -1 *.yaml` - do - cortex -t myTenant-dev catalog create -f ${file} - done - -**Option 2: combine the export and import in a single command** - -This option is simpler and doesn't require any disk operations. However, if it fails for any reason you have to run the -entire export/import in its entirety. - -.. code:: bash - - for domain in `cortex -t myTenant catalog list -t domain | jq -r ".entities[].tag" | sort` - do - echo "Processing domain: ${domain}" - cortex -t myTenant catalog descriptor -y -t ${domain} | cortex -t myTenant-dev catalog create -f- - done - - ------------------------ Iterate over all domains ------------------------ .. code:: bash - for domain in `cortex catalog list -t domain | jq -r ".entities[].tag" | sort`; do echo "domain = $domain"; done + for domain in `cortex catalog list -t domain --csv -C tag --sort tag:asc`; do echo "domain = $domain"; done ---------------------- Iterate over all teams ---------------------- +**NOTE:** as of June 2025, requires a feature flag enabled to return team entities in the catalog API. Work with your CSM if you need assistance. + .. 
code:: bash - for team in `cortex catalog list -t team | jq -r ".entities[].tag" | sort`; do echo "team = $team"; done + for team in `cortex catalog list -t team --csv -C tag --sort tag:asc`; do echo "team = $team"; done ------------------------- Iterate over all services @@ -379,7 +201,7 @@ Iterate over all services .. code:: bash - for service in `cortex catalog list -t service | jq -r ".entities[].tag" | sort`; do echo "service = $service"; done + for service in `cortex catalog list -t service --csv -C tag --sort tag:asc`; do echo "service = $service"; done ----------------------------- Get git details for a service @@ -404,7 +226,7 @@ Add a suffix to all x-cortex-tag values for services .. code:: bash - for service in `cortex catalog list -t service | jq -r ".entities[].tag" | sort`; do + for service in `cortex catalog list -t service --csv -C tag --sort tag:asc`; do cortex catalog descriptor -y -t ${service} | yq '.info.x-cortex-tag |= . + "-suffix"' | cortex catalog create -f- done @@ -435,7 +257,7 @@ Remove a group from domains .. code:: bash - for domain in `cortex catalog list -t domain -g my-old-group | jq -r ".entities[].tag" | sort`; do + for domain in `cortex catalog list -t domain --csv -C tag --sort tag:asc`; do cortex catalog descriptor -y -t ${domain} | yq -e '.info.x-cortex-groups -= [ "my-old-group" ]' | cortex catalog create -f- done @@ -461,7 +283,7 @@ Modify all github basepath values for domain entitities, changing '-' to '_' .. code:: bash - for domain in `cortex catalog list -t domain | jq -r ".entities[].tag"`; do + for domain in `cortex catalog list -t domain --csv -C tag --sort tag:asc`; do cortex catalog descriptor -y -t ${domain} | yq ".info.x-cortex-git.github.basepath |= sub(\"-\", \"_\")" | cortex catalog create -f- done @@ -488,7 +310,7 @@ Create a backup of all scorecards .. 
code:: bash - for tag in `cortex scorecards list | jq -r ".scorecards[].tag"` + for tag in `cortex scorecards list --csv -C tag` do echo "backing up: ${tag}" cortex scorecards descriptor -t ${tag} > ${tag}.yaml @@ -503,7 +325,7 @@ and it appends " Draft" to the end of the existing title. .. code:: bash - for tag in `cortex scorecards list | jq -r ".scorecards[].tag"` + for tag in `cortex scorecards list --csv -C tag` do cortex scorecards descriptor -t ${tag} | yq '.draft = true | .tag += "-draft" | .name += " Draft"' | cortex scorecards create -f- done @@ -517,7 +339,7 @@ which the drafts were created and delete the drafts. .. code:: bash - for tag in `cortex scorecards list -s | jq -r ".scorecards[].tag" | grep "\-draft$"` + for tag in `cortex scorecards list --csv -C tag --filter tag=.*-draft` do cortex scorecards descriptor -t ${tag} | yq '.draft = false | .tag |= sub("-draft","") | .name |= sub(" Draft", "")' | cortex scorecards create -f- && cortex scorecards delete -t ${tag} done @@ -530,7 +352,7 @@ This recipe is similar to the one above, but it does not create a new scorecard .. code:: bash - for tag in `cortex scorecards list -s | jq -r ".scorecards[].tag" | grep "\-draft$"` + for tag in `cortex scorecards list --csv -C tag --filter tag=.*-draft` do cortex scorecards descriptor -t ${tag} | yq '.draft = false | .tag |= sub("-draft","") | .name |= sub(" Draft", "")' > ${tag}.yaml done @@ -586,37 +408,6 @@ Run this command for two different scorecards and diff the csv files to compare sdiff -s /tmp/scorecard1.csv /tmp/scorecard2.csv ------------------------------------------------------------------------------ -Backup all Workday teams ------------------------------------------------------------------------------ - -This recipe is helpful if you change your Workday report and want to save your existing teams in case you want to restore them. 
- -For each team it will create two files: -- a JSON file that contains the Workday data -- a Cortex team YAML file that refers to the Workday team - -.. code:: bash - - for team in `cortex teams list | jq -r '.teams[] | select (.type == "IDP") | select (.idpGroup.provider == "WORKDAY") | .teamTag'` - do - cortex teams get -t ${team} > ${team}.json - cortex catalog descriptor -y -t ${team} > ${team}.yaml - done - ------------------------------------------------------------------------------ -Delete all Workday teams ------------------------------------------------------------------------------ - -This recipe is helpful if you want to remove all Workday teams and import from scratch. - -.. code:: bash - - for team in `cortex teams list | jq -r '.teams[] | select (.type == "IDP") | select (.idpGroup.provider == "WORKDAY") | .teamTag'` - do - cortex teams delete -t ${team} - done - ----------------------------------------------------------------------------- Add provider for all group type owners where provider is not listed ----------------------------------------------------------------------------- diff --git a/STYLE.md b/STYLE.md new file mode 100644 index 0000000..724b2ed --- /dev/null +++ b/STYLE.md @@ -0,0 +1,55 @@ +# CLI commands style guide (WIP) + +Here are some guidelines on developing commands for Cortex CLI + +## Flags and arguments + +* Prefer flags over arguments, so that command actions are clearer and future additions are less likely to break existing scripts. +* Flags should have a long two-dash version and a short single-dash version: `--long-version`, `-l`. +* Try to use the same short version flag everywhere. We want to avoud having a single letter flag that means different things in different commands. +* Flags that are multiple words should be in `kebab-case`. + +## General forms + +Commands should be readable and easy to understand. 
In general, the parts of a command may be: + +* Executable name e.g., `cortex` +* Global flags that affect the behavior of the executable as a whole, like `--tenant` or `--config` +* Top-level object type or topic e.g., `team`, `catalog` +* Top level verb e.g., `create`, `list`, `add` +* Verb objects, if applicable, e.g., `links`, `description` +* Command-specific arguments and flags, e.g., `--description`, `--file` + +Examples: +``` +# list catalog entries of type 'service' and include ownership info +cortex catalog list --include-owners --types service + +# create a team from a file +cortex teams create --file input.json + +# add a link to a team +cortex teams add link --url https://www.catster.com --type documentation --name Catster +``` + +## Standard verbs + +Recommendations for verbs to use in the CLI, and their meanings: + +* **list** - List out a resource of which there may be many. If the endpoing is paginated, retrieve all pages by default. Optionally provide `--page` and `--page-size` to allow the user to get a single page. This should map to either a get or a fetch in the client. Provide options to the user for table and CSV output. + +* **get** - Get the full details of a specific object. This would usually map to a HTTPS GET. The user would expect to see detailed information about a single object. + +* **create** - Create an object. If your command is not creating an object but rather adding information to an existing object, it should be called **add** rather than create. Create should fail if the object already exists. Consider adding `--replace-existing` and `--update-existing` flags if you want to allow this behavior for users. Original create commands required the full definition of the object in JSON or YAML; all new create commands should have this as default behavior as well. Consider adding flags to also allow creation of objects without a full JSON/YAML object definition. + +* **delete** - Delete an object. 
If the terminal is interactive, prompt the user
Fail immediately if the terminal is not active or a browser is not available. Consider warning the user if this would result in opening more than 3 browser tabs. diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py new file mode 100755 index 0000000..4d3e0be --- /dev/null +++ b/cortexapps_cli/cli.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 + +import typer +from typing_extensions import Annotated + +import os +import sys +import importlib.metadata +import tomllib +import configparser +import logging + +from cortexapps_cli.cortex_client import CortexClient + +import cortexapps_cli.commands.api_keys as api_keys +import cortexapps_cli.commands.audit_logs as audit_logs +import cortexapps_cli.commands.backup as backup +import cortexapps_cli.commands.catalog as catalog +import cortexapps_cli.commands.custom_data as custom_data +import cortexapps_cli.commands.custom_events as custom_events +import cortexapps_cli.commands.custom_metrics as custom_metrics +import cortexapps_cli.commands.dependencies as dependencies +import cortexapps_cli.commands.deploys as deploys +import cortexapps_cli.commands.discovery_audit as discovery_audit +import cortexapps_cli.commands.docs as docs +import cortexapps_cli.commands.entity_types as entity_types +import cortexapps_cli.commands.gitops_logs as gitops_logs +import cortexapps_cli.commands.groups as groups +import cortexapps_cli.commands.initiatives as initiatives +import cortexapps_cli.commands.integrations as integrations +import cortexapps_cli.commands.ip_allowlist as ip_allowlist +import cortexapps_cli.commands.on_call as on_call +import cortexapps_cli.commands.packages as packages +import cortexapps_cli.commands.plugins as plugins +import cortexapps_cli.commands.queries as queries +import cortexapps_cli.commands.rest as rest +import cortexapps_cli.commands.scim as scim +import cortexapps_cli.commands.scorecards as scorecards +import cortexapps_cli.commands.teams as teams +import cortexapps_cli.commands.workflows as workflows + 
# global options
@app.callback()
def global_callback(
    ctx: typer.Context,
    api_key: str = typer.Option(None, "--api-key", "-k", help="API key", envvar="CORTEX_API_KEY"),
    url: str = typer.Option("https://api.getcortexapp.com", "--url", "-u", help="Base URL for the API", envvar="CORTEX_BASE_URL"),
    config_file: str = typer.Option(os.path.join(os.path.expanduser('~'), '.cortex', 'config'), "--config", "-c", help="Config file path", envvar="CORTEX_CONFIG"),
    tenant: str = typer.Option("default", "--tenant", "-t", help="Tenant alias", envvar="CORTEX_TENANT_ALIAS"),
    log_level: Annotated[str, typer.Option("--log-level", "-l", help="Set the logging level")] = "INFO"
):
    """
    Resolve credentials and connection settings, then store a ready-to-use
    CortexClient on ctx.obj["client"] for all subcommands.

    Resolution order: an explicit --api-key / CORTEX_API_KEY wins; otherwise
    the [tenant] section of the config file supplies api_key and (optionally)
    base_url. If no config file exists and we are attached to a terminal, the
    user is offered the chance to create one.
    """
    if not ctx.obj:
        ctx.obj = {}

    # Validate the log-level name up front so a typo fails fast.
    numeric_level = getattr(logging, log_level.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError(f"Invalid log level: {log_level}")

    if not os.path.isfile(config_file):
        # No config file: an explicit API key is mandatory.
        if not api_key:
            raise typer.BadParameter("No API key provided and no config file found")
        create_config = False

        # Only prompt when both stdin and stdout are attached to a terminal
        # (never block a scripted/batch invocation on input).
        if sys.stdin.isatty() and sys.stdout.isatty():
            create_config = typer.confirm("No config file found. Do you want to create one?")

        if create_config:
            os.makedirs(os.path.dirname(config_file), exist_ok=True)
            with open(config_file, "w") as f:
                f.write(f"[{tenant}]\n")
                f.write(f"api_key = {api_key}\n")
                f.write(f"base_url = {url}\n")
    else:
        # Config file found.
        # An explicitly provided api_key takes precedence over the config file.
        if not api_key:
            config = configparser.ConfigParser()
            config.read(config_file)
            if tenant not in config:
                raise typer.BadParameter(f"Tenant {tenant} not found in config file")
            if "api_key" not in config[tenant]:
                raise typer.BadParameter(f"Tenant {tenant} has no api_key in config file")
            api_key = config[tenant]["api_key"]
            # BUG FIX: the original tested `if url not in config[tenant]:` --
            # comparing the URL *value* against the section's option names --
            # which is effectively always true, so a base_url configured in the
            # file was silently ignored. Use the section's base_url when present,
            # otherwise keep the default/CLI value.
            url = config[tenant].get("base_url", url)

    # strip any quotes or spaces from the api_key and url
    api_key = api_key.strip('"\' ')
    url = url.strip('"\' /')

    ctx.obj["client"] = CortexClient(api_key, tenant, numeric_level, url)
+ """ + try: + with open("pyproject.toml", "rb") as f: + pyproject = tomllib.load(f) + version = pyproject["tool"]["poetry"]["version"] + except Exception as e: + version = importlib.metadata.version('cortexapps_cli') + print(version) + +if __name__ == "__main__": + app() diff --git a/cortexapps_cli/command_options.py b/cortexapps_cli/command_options.py new file mode 100644 index 0000000..ca601cd --- /dev/null +++ b/cortexapps_cli/command_options.py @@ -0,0 +1,43 @@ +import typer +from typing import List, Optional +from typing_extensions import Annotated + +class ListCommandOptions: + table_output = Annotated[ + Optional[bool], + typer.Option("--table", help="Output the response as a table", show_default=False) # , callback=table_output_cb) + ] + csv_output = Annotated[ + Optional[bool], + typer.Option("--csv", help="Output the response as CSV", show_default=False) # , callback=csv_output_cb) + ] + columns = Annotated[ + Optional[List[str]], + typer.Option("--columns", "-C", help="Columns to include in the table, in the format HeaderName=jsonpath", show_default=False) + ] + filters = Annotated[ + Optional[List[str]], + typer.Option("--filter", "-F", help="Filters to apply on rows, in the format jsonpath=regex", show_default=False) + ] + no_headers = Annotated[ + Optional[bool], + typer.Option("--no-headers", help="For csv output type only: don't print header columns.", show_default=False) + ] + sort = Annotated[ + Optional[List[str]], + typer.Option("--sort", "-S", help="Sort order to apply on rows, in the format jsonpath:asc or jsonpath:desc", show_default=False) + ] + page = Annotated[ + Optional[int], + typer.Option("--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages", show_default=False) + ] + page_size = Annotated[ + Optional[int], + typer.Option("--page-size", "-z", help="Page size for results", show_default=False) + ] + +class CommandOptions: + _print = Annotated[ + Optional[bool], + typer.Option("--print", help="If result should 
@app.command()
def list(
    ctx: typer.Context,
    page: ListCommandOptions.page = None,
    page_size: ListCommandOptions.page_size = 250,
    table_output: ListCommandOptions.table_output = False,
    csv_output: ListCommandOptions.csv_output = False,
    columns: ListCommandOptions.columns = [],
    no_headers: ListCommandOptions.no_headers = False,
    filters: ListCommandOptions.filters = [],
    sort: ListCommandOptions.sort = [],
):
    """
    List API keys. The API key used to make the request must have the Edit API keys permission.
    """
    client = ctx.obj["client"]

    # Supply a sensible default column set for tabular output when the caller
    # did not request specific columns.
    wants_tabular = table_output or csv_output
    if wants_tabular and not ctx.params.get('columns'):
        ctx.params['columns'] = [
            "CID=cid",
            "Name=name",
            "Last4=last4",
            "Description=description",
            "Roles=roles",
            "CreatedDate=createdDate",
            "ExpirationDate=expirationDate",
        ]

    # Build the query, dropping any unset values in one pass.
    query = {
        key: value
        for key, value in {"page": page, "pageSize": page_size}.items()
        if value is not None
    }

    # No explicit page means "walk every page"; otherwise fetch just that one.
    if page is not None:
        response = client.get("api/v1/auth/key", params=query)
    else:
        response = client.fetch("api/v1/auth/key", params=query)
    print_output_with_context(ctx, response)
The API key used to make the request must have the Create API keys permission + """ + client = ctx.obj["client"] + + if file_input: + if name or description or expiration_date or default_roles or custom_roles: + raise typer.BadParameter("When providing an API definition file, do not specify any other attributes") + data = json.loads("".join([line for line in file_input])) + else: + if not default_roles and not custom_roles: + raise typer.BadParameter("One of default-roles or custom-roles is required") + + data = { + "roles": [], + "name": name + } + + if default_roles is not None: + for role in default_roles.split(","): + data["roles"].append({"role": role, "type": "DEFAULT"}) + if custom_roles is not None: + for role in custom_roles.split(","): + data["roles"].append({"tag": role, "type": "CUSTOM"}) + + if description: + data["description"] = description + if expiration_date: + data["expirationDate"] = expiration_date.strftime('%Y-%m-%dT%H:%M:%S.000Z') + + r = client.post("api/v1/auth/key", data=data) + print_output_with_context(ctx, r) + +@app.command() +def update( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="The unique, auto-generated identifier for the API key"), + description: str | None = typer.Option(None, "--description", "-d", help="Description of the API key"), + name: str = typer.Option(..., "--name", "-n", help="Name of the API key"), +): + """ + Update API key. The API key used to make the request must have the Edit API keys permission. + """ + client = ctx.obj["client"] + + data = { + "name": name + } + if description is not None: + data["description"] = description + + r = client.put("api/v1/auth/key/" + cid, data=data) + print_output_with_context(ctx, r) + +@app.command() +def get( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="The unique, auto-generated identifier for the API key"), +): + """ + Get API key. 
+ """ + + client = ctx.obj["client"] + + r = client.get("api/v1/auth/key/"+ cid) + print_output_with_context(ctx, r) + +@app.command() +def delete( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="The unique, auto-generated identifier for the API key"), +): + """ + Delete API key. + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/auth/key/"+ cid) diff --git a/cortexapps_cli/commands/audit_logs.py b/cortexapps_cli/commands/audit_logs.py new file mode 100644 index 0000000..a2ea3e9 --- /dev/null +++ b/cortexapps_cli/commands/audit_logs.py @@ -0,0 +1,97 @@ +from datetime import datetime +from enum import Enum +import typer +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output + +app = typer.Typer( + help="Audit log commands", + no_args_is_help=True +) + +class Action(str, Enum): + CREATE = "CREATE" + DELETE = "DELETE" + UPDATE = "UPDATE" + +class ActorType(str, Enum): + ANONYMOUS = "ANONYMOUS" + API_KEY = "API_KEY" + BACKSTAGE = "BACKSTAGE" + OAUTH2 = "OAUTH2" + PERSONAL_API_KEY = "PERSONAL_API_KEY" + +class ActorRequestType(str, Enum): + API_KEY_ENTITY = "API_KEY_ENTITY" + ATLASSIAN_WEBHOOK = "ATLASSIAN_WEBHOOK" + SCORECARD_BADGES = "SCORECARD_BADGES" + SLACK_COMMAND = "SLACK_COMMAND" + +@app.command() +def get( + ctx: typer.Context, + actions: list[Action] | None = typer.Option(None, "--actions", "-a", help="The audit action"), + actorApiKeyIdentifiers: list[str] | None = typer.Option(None, "--actorApiKeyIdentifiers", "-ak", help="API key name associated with audit event"), + actorEmails: list[str] | None = typer.Option(None, "--actorEmails", "-ae", help="Email address associated with audit event"), + actorIpAddresses: list[str] | None = typer.Option(None, "--actorIpAddresses", "-ai", help="Source IP Addresses associated with audit event"), + actorRequestTypes: list[ActorRequestType] | None = typer.Option(None, "--actorRequestTypes", "-ar", 
help="Request event associated with audit event"), + actorTypes: list[ActorType] | None = typer.Option(None, "--actorTypes", "-at", help="Actor that triggered the audit event"), + end_time: datetime = typer.Option(None, "--end-time", "-e", help="End time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + objectIdentifiers: list[str] | None = typer.Option(None, "--objectIdentifiers", "-oi", help="The name of the Cortex object that was modified, ie x-cortex-tag value, metadata field name, etc."), + objectTypes: list[str] | None = typer.Option(None, "--objectTypes", "-ot", help="ObjectTypes"), + start_time: datetime = typer.Option(None, "--start-time", "-s", help="Start time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], +): + """ + Note: To see the complete list of possible values, please reference the available filter options for audit logs under Settings in the app. 
+ """ + client = ctx.obj["client"] + + params = { + "actions": actions, + "actorApiKeyIdentifiers": actorApiKeyIdentifiers, + "actorEmails": actorEmails, + "actorIpAddresses": actorIpAddresses, + "actorRequestTypes": actorRequestTypes, + "actorTypes": actorTypes, + "endTime": end_time, + "objectIdentifiers": objectIdentifiers, + "objectTypes": objectTypes, + "page": page, + "pageSize": page_size, + "startTime": start_time + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + # convert datetime and list types to string + for k, v in params.items(): + if str(type(v)) == "": + params[k] = v.strftime('%Y-%m-%dT%H:%M:%S') + if str(type(v)) == "": + params[k] = ','.join(v) + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Action=action", + "ObjectType=objectType", + "ActorIdentifier=actorIdentifier", + "ObjectIdentifier=objectIdentifier", + "IpAddress=ipAddress", + "Timestamp=timestamp", + ] + + if page is None: + r = client.fetch("api/v1/audit-logs", params=params) + else: + r = client.get("api/v1/audit-logs", params=params) + + print_output_with_context(ctx, r) diff --git a/cortexapps_cli/commands/backup.py b/cortexapps_cli/commands/backup.py new file mode 100644 index 0000000..fa31212 --- /dev/null +++ b/cortexapps_cli/commands/backup.py @@ -0,0 +1,310 @@ +from datetime import datetime +from typing import Optional +from typing import List +from typing_extensions import Annotated +import typer +import json +import os +from rich import print, print_json +from rich.console import Console +from enum import Enum +import yaml + +import cortexapps_cli.commands.scorecards as scorecards +import cortexapps_cli.commands.catalog as catalog +import cortexapps_cli.commands.entity_types as entity_types +import cortexapps_cli.commands.ip_allowlist as ip_allowlist +import cortexapps_cli.commands.plugins as plugins +import cortexapps_cli.commands.workflows as workflows + + +app = 
typer.Typer(help="Backup commands") + +# Need to support the following: +# DONE -> Catalog +# Custom Data from API +# Custom Events +# Custom Metrics +# Dependencies from API +# Deploys from API +# Docs +# Eng Intel - User Labels +# DONE -> Entity Types +# Groups from API -> would have to loop over all entities +# DONE -> IP Allowlist +# Packages from API -> would have to loop over all entities +# DONE -> Plugins +# DONE -> Scorecards +# Secrets +# DONE -> Workflows + +def _create_directory(directory): + if not os.path.isdir(directory): + os.mkdir(directory, 0o700) + +def _directory_name(directory, backup_type): + directory = directory + "/" + backup_type + _create_directory(directory) + print("Getting " + backup_type) + return directory + +def _file_name(directory, tag, content, extension): + print("--> " + tag) + file = directory + "/" + tag + "." + extension + if extension == "json": + is_json = True + else: + is_json = False + _write_file(content, file, is_json) + +def _write_file(content, file, is_json=False): + with open(file, 'w') as f: + if is_json: + print(content, file=f) + else: + f.write(str(content) + "\n") + f.close() + +def _export_catalog(ctx, directory, catalog_types): + directory = _directory_name(directory, "catalog") + + data = catalog.list_descriptors(ctx, types=catalog_types, page_size=1000, yaml="true", _print=False) + + for descriptor in data['descriptors']: + try: + y = yaml.safe_load(str(descriptor)) + tag = y['info']['x-cortex-tag'] + y = yaml.dump(y, default_flow_style=False) + except: + print("error") + print(str(descriptor)) + continue + finally: + # Slash will be interpreted as a sub-directory + tag = tag.replace("/", "-") + _file_name(directory, tag, y, "yaml") + +def _export_entity_types(ctx, directory): + directory = _directory_name(directory, "entity-types") + + data = entity_types.list(ctx, include_built_in=False, page=0, page_size=250, _print=False) + definitions_sorted = sorted(data['definitions'], key=lambda x: x["type"]) + + 
for definition in definitions_sorted: + tag = definition['type'] + json_string = json.dumps(definition, indent=4) + _file_name(directory, tag, json_string, "json") + +def _export_ip_allowlist(ctx, directory): + directory = _directory_name(directory, "ip-allowlist") + file = directory + "/ip-allowlist.json" + + content = ip_allowlist.get(ctx, page=None, page_size=None, _print=False) + _file_name(directory, "ip-allowlist", str(content), "json") + +def _export_plugins(ctx, directory): + directory = _directory_name(directory, "plugins") + + list = plugins.list(ctx, _print=False, include_drafts="true", page=None, page_size=None) + tags = [plugin["tag"] for plugin in list["plugins"]] + tags_sorted = sorted(tags) + for tag in tags_sorted: + content = plugins.get(ctx, tag_or_id=tag, include_blob="true", _print=False) + _file_name(directory, tag, content, "json") + +def _export_scorecards(ctx, directory): + directory = _directory_name(directory, "scorecards") + + list = scorecards.list(ctx, show_drafts=True, page=None, page_size=None, _print=False) + tags = [scorecard["tag"] for scorecard in list["scorecards"]] + tags_sorted = sorted(tags) + for tag in tags_sorted: + content = scorecards.descriptor(ctx, scorecard_tag=tag, _print=False) + _file_name(directory, tag, content, "yaml") + +def _export_workflows(ctx, directory): + directory = _directory_name(directory, "workflows") + + list = workflows.list(ctx, _print=False, include_actions="false", page=None, page_size=None, search_query=None) + tags = [workflow["tag"] for workflow in list["workflows"]] + tags_sorted = sorted(tags) + for tag in tags_sorted: + try: + content = workflows.get(ctx, tag=tag, yaml="true", _print=False) + _file_name(directory, tag, content, "yaml") + except: + print("failed for " + tag) + +backupTypes = { + "catalog", + "entity-types", + "ip-allowlist", + "plugins", + "scorecards", + "workflows" +} +backupString = ','.join(backupTypes) + +def _parse_export_types(value: str) -> List[str]: + if value == 
"all": + return backupTypes + types = [] + for val in value: + for item in val.split(","): + if item not in backupTypes: + raise typer.BadParameter(item + " is not a valid type. Valid types are: " + backupString + ".") + else: + types.append(item) + return types + +def _parse_catalog_types(ctx, catalog_types): + data = entity_types.list(ctx, include_built_in=True, page=0, page_size=250, _print=False) + + built_in = ['service', 'team', 'domain'] + tags = [entity_type["type"] for entity_type in data["definitions"]] + tags_sorted = sorted(tags + built_in) + all_types_string = ','.join(tags_sorted) + if catalog_types == "all": + return all_types_string + + for item in catalog_types.split(","): + if item not in tags_sorted: + raise typer.BadParameter(item + " is not a valid type. Valid types are: " + all_types_string + ".") + return catalog_types + +@app.command() +def export( + ctx: typer.Context, + export_types: List[str] = typer.Option(_parse_export_types("all"), "--export-types", "-e", help="some help test", callback=_parse_export_types), + catalog_types: str = typer.Option("all", "--catalog-types", "-c", help="Comma separated list of catalog types to export, defaults to service,team,domain plus all user-created entity-types"), + directory: str = typer.Option(os.path.expanduser('~') + '/.cortex/export/' + datetime.now().strftime("%Y-%m-%d-%H-%M-%S"), "--directory", "-d", help="Location of export directory, defaults to ~/.cortex/export/-tenant"), +): + """ + Export tenant + + Exports the following objects: + - catalog + - entity-types + - ip-allowlist + - plugins + - scorecards + - workflows + + By default, it does not export any entities that would be created by an integration, for example AWS objects. This is because these + entities are maintained by the integration and do not need to be backed up. 
def _import_entity_types(ctx, force, directory):
    """Import every entity-type definition file found in directory.

    Silently does nothing when the directory is absent (the export may not
    have included entity types).
    """
    if os.path.isdir(directory):
        print("Processing: " + directory)
        for filename in sorted(os.listdir(directory)):
            file_path = os.path.join(directory, filename)
            if os.path.isfile(file_path):
                print(" Importing: " + filename)
                # Close each file deterministically instead of leaking the
                # handle until garbage collection.
                with open(file_path) as f:
                    entity_types.create(ctx, file_input=f, force=force)
"/catalog") + _import_plugins(ctx, directory + "/plugins") + _import_scorecards(ctx, directory + "/scorecards") + _import_workflows(ctx, directory + "/workflows") diff --git a/cortexapps_cli/commands/backup_commands/cortex_export.py b/cortexapps_cli/commands/backup_commands/cortex_export.py new file mode 100644 index 0000000..5134acd --- /dev/null +++ b/cortexapps_cli/commands/backup_commands/cortex_export.py @@ -0,0 +1,174 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Azure Devops commands", no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + organization_slug: str = typer.Option(..., "--organization-slug", "-o", help="Identifier for organization"), + personal_access_token: str = typer.Option(..., "--pat", "-p", help="Personal Access Token"), + username: str = typer.Option(..., "--username", "-u", help="Username"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or is_default or host or organization_slug or personal_access_token or username: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "host": host, + "isDefault": is_default, + "organizationSlug": organization_slug, + "personalAccessToken": personal_access_token, + "username": username + } + + # remove any data 
@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """

    client = ctx.obj["client"]

    # json.load reads directly from the open file handle.
    data = json.load(file_input)

    # BUG FIX: this endpoint previously pointed at "api/v1/aws/configurations";
    # every other command in this module targets the azure-devops API.
    r = client.put("api/v1/azure-devops/configurations", data=data)
    print_json(data=r)
the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/azure-devops/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-devops/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-devops/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py new file mode 100644 index 0000000..dba4679 --- /dev/null +++ b/cortexapps_cli/commands/catalog.py @@ -0,0 +1,365 @@ +import typer +from typing import Optional, List +from typing_extensions import Annotated + +from cortexapps_cli.command_options import ListCommandOptions, CommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output + +app = typer.Typer(help="Catalog commands", no_args_is_help=True) + +class CatalogCommandOptions: + include_archived = Annotated[ + Optional[bool], + typer.Option("--include-archived", "-a", help="Include archived entities", show_default=False) + ] + hierarchy_depth = Annotated[ + Optional[str], + typer.Option("--hierarchy-depth", "-d", help="Depth of the parent / children hierarchy nodes. Can be 'full' or a valid integer", show_default=False) + ] + groups = Annotated[ + Optional[str], + typer.Option("--groups", "-g", help="Filter based on groups, which correspond to the x-cortex-groups field in the Catalog Descriptor. 
Accepts a comma-delimited list of groups", show_default=False) + ] + owners = Annotated[ + Optional[str], + typer.Option("--owners", "-o", help="Filter based on owner group names, which correspond to the x-cortex-owners field in the Catalog Descriptor. Accepts a comma-delimited list of owner group names", show_default=False) + ] + include_hierarchy_fields = Annotated[ + Optional[str], + typer.Option("--include-hierarchy-fields", "-i", help="List of sub fields to include for hierarchies. Only supports 'groups'", show_default=False) + ] + include_nested_fields = Annotated[ + Optional[str], + typer.Option("--include-nested-fields", "-in", help="List of sub fields to include for different types, for example team:members", show_default=False) + ] + include_owners = Annotated[ + Optional[bool], + typer.Option("--include-owners", "-io", help="Include ownership information for each entity in the response", show_default=False) + ] + include_links = Annotated[ + Optional[bool], + typer.Option("--include-links", "-l", help="Include links for each entity in the response", show_default=False) + ] + include_metadata = Annotated[ + Optional[bool], + typer.Option("--include-metadata", "-m", help="Include custom data for each entity in the response", show_default=False) + ] + dry_run = Annotated[ + Optional[bool], + typer.Option("--dry-run", "-dry", help="When true, only validates the descriptor contents and returns any errors or warnings", show_default=False) + ] + append_arrays = Annotated[ + Optional[bool], + typer.Option("--append-arrays", "-a", help="Default merge behavior is to replace arrays, set this to true to append arrays instead. 
For simple types, duplicate values will be removed from the merged array", show_default=False) + ] + fail_if_not_exist = Annotated[ + Optional[bool], + typer.Option("--fail-if-not-exist", "-n", help="Default behavior is to upsert the entity, if set command will fail (404) if the entity specified in x-cortex-tag does not exist.", show_default=False) + ] + git_repositories = Annotated[ + Optional[str], + typer.Option("--git-repositories", "-r", help="Supports only GitHub repositories in the org/repo format", show_default=False) + ] + types = Annotated[ + Optional[str], + typer.Option("--types", "-t", help="Filter the response to specific types of entities. By default, this includes services, resources, and domains. Corresponds to the x-cortex-type field in the Entity Descriptor.", show_default=False) + ] + +@app.command(name="list") +def catalog_list( + ctx: typer.Context, + include_archived: CatalogCommandOptions.include_archived = False, + hierarchy_depth: CatalogCommandOptions.hierarchy_depth = 'full', + groups: CatalogCommandOptions.groups = None, + owners: CatalogCommandOptions.owners = None, + include_hierarchy_fields: CatalogCommandOptions.include_hierarchy_fields = None, + include_nested_fields: CatalogCommandOptions.include_nested_fields = None, + include_owners: CatalogCommandOptions.include_owners = False, + include_links: CatalogCommandOptions.include_links = False, + include_metadata: CatalogCommandOptions.include_metadata = False, + git_repositories: CatalogCommandOptions.git_repositories = None, + types: CatalogCommandOptions.types = None, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], + _print: CommandOptions._print = True, 
+): + """ + List entities in the catalog + """ + client = ctx.obj["client"] + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "ID=id", + "Tag=tag", + "Name=name", + "Type=type", + "Git Repository=git.repository", + ] + + params = { + "includeArchived": include_archived, + "hierarchyDepth": hierarchy_depth, + "groups": groups, + "owners": owners, + "includeHierarchyFields": include_hierarchy_fields, + "includeNestedFields": include_nested_fields, + "includeOwners": include_owners, + "includeLinks": include_links, + "includeMetadata": include_metadata, + "page": page, + "pageSize": page_size, + "gitRepositories": git_repositories, + "types": types, + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + # for keys that can have multiple values, remove whitespace around comma and split on comma + for key in ['groups', 'owners', 'gitRepositories', 'types']: + if key in params: + params[key] = [x.strip() for x in params[key].split(',')] + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog", params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) + +@app.command() +def details( + ctx: typer.Context, + hierarchy_depth: CatalogCommandOptions.hierarchy_depth = 'full', + include_hierarchy_fields: CatalogCommandOptions.include_hierarchy_fields = None, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + no_headers: ListCommandOptions.no_headers = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], +): + """ + Get details for a 
specific entity in the catalog + """ + client = ctx.obj["client"] + + if table_output and csv_output: + raise typer.BadParameter("Only one of --table and --csv can be specified") + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "ID=id", + "Tag=tag", + "Name=name", + "Type=type", + "Git Repository=git.repository", + ] + + output_format = "table" if table_output else "csv" if csv_output else "json" + + params = { + "hierarchyDepth": hierarchy_depth, + "includeHierarchyFields": include_hierarchy_fields + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("api/v1/catalog/" + tag, params=params) + + data = r if output_format == 'json' else [r] + print_output_with_context(ctx, data) + +@app.command() +def archive( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Archive an entity + """ + client = ctx.obj["client"] + + r = client.put("api/v1/catalog/" + tag + "/archive") + +@app.command() +def unarchive( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Unarchive an entity + """ + client = ctx.obj["client"] + + r = client.put("api/v1/catalog/" + tag + "/unarchive") + print_output_with_context(ctx, r) + +@app.command() +def delete( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Delete an entity + """ + client = ctx.obj["client"] + + client.delete("api/v1/catalog/" + tag) + +@app.command() +def delete_by_type( + ctx: typer.Context, + types: CatalogCommandOptions.types = None, +): + """ + Dangerous operation that will delete all entities that are of the given type + """ + client = ctx.obj["client"] + + #TODO: 
check if types is a regex of form: ([-A-Za-z]+,)+ + + params = { + "types": types + } + + client.delete("api/v1/catalog", params=params) + + +@app.command() +def descriptor( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + yaml: bool = typer.Option(False, "--yaml", "-y", help="When true, returns the YAML representation of the descriptor."), + _print: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), +): + """ + Retrieve entity descriptor + """ + client = ctx.obj["client"] + + params = { + "yaml": str(yaml).lower() + } + + r = client.get("api/v1/catalog/" + tag + "/openapi", params=params) + if _print: + if yaml: + print(r) + else: + print_output_with_context(ctx, r) + #print(r) + else: + if yaml: + return(r) + else: + print_output_with_context(ctx, r) + +@app.command() +def create( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing YAML content of entity; can be passed as stdin with -, example: -f-")] = None, + dry_run: CatalogCommandOptions.dry_run = False, + _print: CommandOptions._print = True, +): + """ + Create entity + """ + client = ctx.obj["client"] + + params = { + "dryRun": dry_run + } + + r = client.post("api/v1/open-api", data=file_input.read(), params=params, content_type="application/openapi;charset=UTF-8") + if _print: + print_output_with_context(ctx, r) + +@app.command() +def patch( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help=" File containing YAML content of entity; can be passed as stdin with -, example: -f-")] = None, + delete_marker_value = typer.Option("__delete__", "--delete-marker-value", "-dmv", help="Delete keys with this value from the merged yaml, defaults to __delete__, if any values match this, they will not be included in merged YAML. 
For example my_value: __delete__ will remove my_value from the merged YAML."), + dry_run: CatalogCommandOptions.dry_run = False, + append_arrays: CatalogCommandOptions.append_arrays = False, + fail_if_not_exist: CatalogCommandOptions.fail_if_not_exist = False, +): + """ + Creates or updates an entity. If the YAML refers to an entity that already exists (as referenced by the x-cortex-tag), this API will merge the specified changes into the existing entity + """ + client = ctx.obj["client"] + + params = { + "dryRun":dry_run, + "appendArrays": append_arrays, + "deleteMarkerValue": delete_marker_value, + "failIfEntityDoesNotExist": fail_if_not_exist + } + + r = client.patch("api/v1/open-api", data=file_input.read(), params=params, content_type="application/openapi;charset=UTF-8") + print_output_with_context(ctx, r) + +@app.command() +def list_descriptors( + ctx: typer.Context, + yaml: bool = typer.Option(False, "--yaml", "-y", help="When true, returns the YAML representation of the descriptor."), + types: CatalogCommandOptions.types = None, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + _print: CommandOptions._print = True, +): + """ + List entity descriptors + """ + client = ctx.obj["client"] + + params = { + "yaml": yaml, + "types": types, + "pageSize": page_size, + "page": page + } + + r = client.fetch_or_get("api/v1/catalog/descriptors", page, _print, params=params) + if not _print: + return(r) + +@app.command() +def gitops_log( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Retrieve most recent GitOps log for entity + """ + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag + "/gitops-logs") + print_output_with_context(ctx, r) + +@app.command() +def scorecard_scores( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, 
auto-generated identifier for the entity."), +): + """ + Retrieve entity Scorecard scores + """ + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag + "/scorecards") + print_output_with_context(ctx, r) diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py new file mode 100644 index 0000000..dca2f4e --- /dev/null +++ b/cortexapps_cli/commands/custom_data.py @@ -0,0 +1,200 @@ +import json +import typer +from typing_extensions import Annotated +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output + +from rich import print_json + +app = typer.Typer(help="Custom data commands", no_args_is_help=True) + +# Need a helper function to parse custom_data. +# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: +# AssertionError: List types with complex sub-types are not currently supported +# +# borrowed from https://github.com/fastapi/typer/issues/387 +def _parse_key_value(values): + if values is None: + return "" + result = {} + for value in values: + k, v = value.split('=') + result[k] = v + return result.items() + +@app.command() +def add( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing keys to update; can be passed as stdin with -, example: -f-")] = None, + force: bool = typer.Option(False, "--force", "-o", help="When true, overrides values that were defined in the catalog descriptor. 
Will be overwritten the next time the catalog descriptor is processed."), + key: str = typer.Option(None, "--key", "-k", help="The custom data key to create (only if file input not provided)."), + value: str = typer.Option(None, "--value", "-v", help="The value of the custom data key (only if file input not provided)."), + description: str = typer.Option(None, "--description", "-d", help="The description of the custom data key (only if file input not provided)."), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") +): + """ + Add custom data for entity + + Format of JSON-formatted configuration file: + + { + "description": "string", + "key": "my-key", + "value": { + "nested": { + "objects": "are ok" + } + } + } + + Examples: + --------- + Single value: + { + "description": "A field to store CI/CD tool", + "key": "ci-cd-tool", + "value": "Jenkins" + } + } + + Nested values: + { + "description": "Custom field to store build metrics", + "key": "build-metrics", + "value": { + "2023-08-01": { + "success-rate": "50" + }, + "2023-08-02": { + "success-rate": "67" + } + } + } + """ + client = ctx.obj["client"] + + params = { + "force": force, + } + + if file_input: + if description or key or value: + raise typer.BadParameter("When providing a custom input definition file, do not specify any other custom data attributes") + data = json.loads("".join([line for line in file_input])) + else: + if not value: + raise typer.BadParameter("value is required if custom data file is not provided") + if not key: + raise typer.BadParameter("key is required if custom data file is not provided") + + data = { + "key": key, + "value": value + } + + if description: + data["description"] = description + + r = client.post("api/v1/catalog/" + tag + "/custom-data", data=data, params=params) + print_json(data=r) + +@app.command() +def bulk( + ctx: typer.Context, + file_input: Annotated[typer.FileText, 
typer.Option("--file", "-f", help=" File containing keys to update; can be passed as stdin with -, example: -f-")] = None, + force: bool = typer.Option(False, "--force", "-o", help="When true, overrides values that were defined in the catalog descriptor. Will be overwritten the next time the catalog descriptor is processed."), +): + """ + Add multiple key/values of custom data to multiple entities + """ + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + params = { + "force": force + } + + r = client.put("api/v1/catalog/custom-data", data=data, params=params) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + force: bool = typer.Option(False, "--force", "-o", help="When true, overrides values that were defined in the catalog descriptor. Will be overwritten the next time the catalog descriptor is processed."), + key: str = typer.Option(..., "--key", "-k", help="The custom metadata key"), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Delete custom data for entity + """ + client = ctx.obj["client"] + + params = { + "force": force, + "key": key + } + + r = client.delete("api/v1/catalog/" + tag + "/custom-data", params=params) + +@app.command() +def get( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + key: str = typer.Option(..., "--key", "-k", help="The custom metadata key"), +): + """ + Retrieve custom data for entity by key + """ + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag + "/custom-data/" + key) + + print_json(data=r) + +@app.command() +def list( + ctx: typer.Context, + #page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + #page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page 
size for results"), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], +): + """ + List custom data for entity + """ + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Id=id", + "Key=key", + "Value=value", + "Date=dateUpdated", + "Source=source", + ] + + #client.fetch_or_get("api/v1/catalog/" + tag + "/custom-data", page, params=params) + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/" + tag + "/custom-data", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/" + tag + "/custom-data", params=params) + + data = r + print_output_with_context(ctx, data) diff --git a/cortexapps_cli/commands/custom_events.py b/cortexapps_cli/commands/custom_events.py new file mode 100644 index 0000000..9c9b969 --- /dev/null +++ b/cortexapps_cli/commands/custom_events.py @@ -0,0 +1,248 @@ +from collections import defaultdict +from datetime import datetime +import json +from rich import print_json +import typer +from typing_extensions import Annotated +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output + +app = typer.Typer( + help="Custom events commands", + no_args_is_help=True +) + +# Need a helper function to parse custom_data. 
+# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None)
+# Results in:
+# AssertionError: List types with complex sub-types are not currently supported
+#
+# borrowed from https://github.com/fastapi/typer/issues/387
+def _parse_key_value(values):
+    if values is None:
+        return ""
+    result = {}
+    for value in values:
+        k, v = value.split('=')
+        result[k] = v
+    return result.items()
+
+@app.command()
+def update_by_uuid(
+    ctx: typer.Context,
+    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None,
+    custom_data: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional custom metadata key=value pairs (only if file input not provided)."),
+    description: str = typer.Option(None, "--description", "-d", help="The description of the custom data key (only if file input not provided)."),
+    title: str = typer.Option(None, "--title", "-ti", help="The title of the custom event (only if file input not provided)."),
+    tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
+    uuid: str = typer.Option(..., "--uuid", "-u", help="UUID of custom event."),
+    event: str = typer.Option(None, "--type", "-y", help="The type of the custom event (only required if file input not provided)."),
+    url: str = typer.Option(None, "--url", help="The url of the custom event (optional, only required if file input not provided)."),
+    timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of custom event, defaults to current time (only if file input not provided)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]),
+):
+    """
+    Update custom event for entity
+    """
+
+    client = ctx.obj["client"]
+
+    if file_input:
+        if description or title or custom_data or event or url:
+            raise typer.BadParameter("When providing a custom event
definition file, do not specify any other custom event attributes")
+        data = json.loads("".join([line for line in file_input]))
+        if timestamp:
+            data["timestamp"] = timestamp
+
+    else:
+        data = {
+            "title": title,
+            "timestamp": timestamp,
+            "type": event,
+            "url": url,
+        }
+
+    if description:
+        data["description"] = description
+    if url:
+        data["url"] = url
+    if custom_data:
+        data["customData"] = dict(custom_data)
+
+    # convert datetime type to string
+    for k, v in data.items():
+        if str(type(v)) == "<class 'datetime.datetime'>":
+            data[k] = v.strftime('%Y-%m-%dT%H:%M:%S')
+
+    r = client.put("api/v1/catalog/" + tag + "/custom-events/" + uuid, data=data)
+    print_json(data=r)
+
+@app.command()
+def create(
+    ctx: typer.Context,
+    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None,
+    custom_data: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional custom metadata key=value pairs (only if file input not provided)."),
+    description: str = typer.Option(None, "--description", "-d", help="The description of the custom data key (only if file input not provided)."),
+    title: str = typer.Option(None, "--title", "-ti", help="The title of the custom event (only if file input not provided)."),
+    tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
+    event: str = typer.Option(None, "--type", "-y", help="The type of the custom event (only required if file input not provided)."),
+    url: str = typer.Option(None, "--url", "-u", help="The url of the custom event (optional, only required if file input not provided)."),
+    timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of custom event, defaults to current time (only if file input not provided)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]),
+):
+    """
+    Create custom event for entity
+ """ + + client = ctx.obj["client"] + + if file_input: + if description or title or custom_data or event or url: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + if timestamp: + data["timestamp"] = timestamp + + else: + if not title: + raise typer.BadParameter("title is required if custom event file is not provided") + if not event: + raise typer.BadParameter("type is required if custom event file is not provided") + + data = { + "title": title, + "timestamp": timestamp, + "type": event, + "url": url, + } + + if description: + data["description"] = description + if url: + data["url"] = url + if custom_data: + data["customData"] = dict(custom_data) + + # convert datetime type to string + for k, v in data.items(): + if str(type(v)) == "": + data[k] = v.strftime('%Y-%m-%dT%H:%M:%S') + + r = client.post("api/v1/catalog/" + tag + "/custom-events", data=data) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + event: str = typer.Option(None, "--type", "-y", help="The type of the custom event, defaults to all."), + timestamp: datetime = typer.Option(None, "--timestamp", "-ts", help="Optional timestamp of custom events to delete.", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), +): + """ + Delete all custom events for an entity + """ + + client = ctx.obj["client"] + + params = { + "type": event, + "timestamp": timestamp + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + # convert datetime type to string + for k, v in params.items(): + if str(type(v)) == "": + params[k] = v.strftime('%Y-%m-%dT%H:%M:%S') + + r = client.delete("api/v1/catalog/" + tag + "/custom-events", params=params) + +@app.command() +def list( + ctx: 
typer.Context,
+    tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
+    event: str = typer.Option(None, "--type", "-y", help="The type of the custom event, defaults to all."),
+    timestamp: datetime = typer.Option(None, "--timestamp", "-ts", help="Optional timestamp of custom events to list.", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]),
+    _print: CommandOptions._print = True,
+    page: ListCommandOptions.page = None,
+    page_size: ListCommandOptions.page_size = 250,
+    table_output: ListCommandOptions.table_output = False,
+    csv_output: ListCommandOptions.csv_output = False,
+    columns: ListCommandOptions.columns = [],
+    no_headers: ListCommandOptions.no_headers = False,
+    filters: ListCommandOptions.filters = [],
+    sort: ListCommandOptions.sort = [],
+):
+    """
+    List custom events for entity
+    """
+    client = ctx.obj["client"]
+
+    params = {
+        "page": page,
+        "pageSize": page_size,
+        "timestamp": timestamp,
+        "type": event
+    }
+
+    # remove any params that are None
+    params = {k: v for k, v in params.items() if v is not None}
+
+    # convert datetime type to string
+    for k, v in params.items():
+        if str(type(v)) == "<class 'datetime.datetime'>":
+            params[k] = v.strftime('%Y-%m-%dT%H:%M:%S')
+
+    if (table_output or csv_output) and not ctx.params.get('columns'):
+        ctx.params['columns'] = [
+            "UUID=uuid",
+            "title=title",
+            "Description=description",
+            "Url=url",
+            "Timestamp=timestamp",
+            "Type=type",
+            "customData=customData",
+        ]
+
+    if page is None:
+        # if page is not specified, we want to fetch all pages
+        r = client.fetch("api/v1/catalog/" + tag + "/custom-events", params=params)
+    else:
+        # if page is specified, we want to fetch only that page
+        r = client.get("api/v1/catalog/" + tag + "/custom-events", params=params)
+
+    if _print:
+        data = r
+        print_output_with_context(ctx, data)
+    else:
+        return(r)
+
+    #client.fetch_or_get("api/v1/catalog/" + tag + "/custom-events", page, prt, params=params)
+
+@app.command()
+def get_by_uuid( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + uuid: str = typer.Option(..., "--uuid", "-u", help="UUID of custom event."), +): + """ + Get custom event by UUID + """ + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag + "/custom-events/" + uuid) + print_json(data=r) + +@app.command() +def delete_by_uuid( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + uuid: str = typer.Option(..., "--uuid", "-u", help="UUID of custom event."), +): + """ + Delete custom events by UUID + """ + client = ctx.obj["client"] + + r = client.delete("api/v1/catalog/" + tag + "/custom-events/" + uuid) diff --git a/cortexapps_cli/commands/custom_metrics.py b/cortexapps_cli/commands/custom_metrics.py new file mode 100644 index 0000000..4183cca --- /dev/null +++ b/cortexapps_cli/commands/custom_metrics.py @@ -0,0 +1,133 @@ +from collections import defaultdict +from datetime import datetime +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Custom metrics commands") + +# Need a helper function to parse custom_data. 
+# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None)
+# Results in:
+# AssertionError: List types with complex sub-types are not currently supported
+#
+# borrowed from https://github.com/fastapi/typer/issues/387
+def _parse_key_value(values):
+    if values is None:
+        return []
+    result = []
+    for value in values:
+        ts, v = value.split('=')
+        result.append({"timestamp": ts, "value": v})
+    return result
+
+def _convert_datetime_to_string(params):
+    for k, v in params.items():
+        if str(type(v)) == "<class 'datetime.datetime'>":
+            params[k] = v.strftime('%Y-%m-%dT%H:%M:%S')
+    return params
+
+@app.command()
+def get(
+    ctx: typer.Context,
+    tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
+    custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The key identifying the custom metric."),
+    start_date: datetime = typer.Option(None, "--start-date", "-s", help="Start date for the filter (inclusive).
Default: 6 months", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]),
+    end_date: datetime = typer.Option(None, "--end-date", "-e", help="End date for the filter (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]),
+    page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"),
+    page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"),
+    prt: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True),
+):
+    """
+    List custom metrics data points for an entity
+    """
+
+    client = ctx.obj["client"]
+
+    params = {
+        "startDate": start_date,
+        "endDate": end_date,
+        "page": page,
+        "pageSize": page_size
+    }
+
+    # remove any params that are None
+    params = {k: v for k, v in params.items() if v is not None}
+
+    params = _convert_datetime_to_string(params)
+
+    client.fetch_or_get("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, page, prt, params=params)
+
+@app.command()
+def add(
+    ctx: typer.Context,
+    tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
+    custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The key identifying the custom metric."),
+    timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-s", help="Timestamp for the data point; cannot be earlier than 6 months", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]),
+    value: float = typer.Option(..., "--value", "-v", help="Value for the data point"),
+):
+    """
+    Add a single custom metric data point for entity
+    """
+
+    client = ctx.obj["client"]
+
+    data = {
+        "timestamp": timestamp,
+        "value": value
+    }
+
+    data = _convert_datetime_to_string(data)
+
+    r = client.post("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, data=data)
+
+@app.command()
+def add_in_bulk(
+ ctx: typer.Context, + custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None, + series: list[str] | None = typer.Option(None, "--value", "-v", callback=_parse_key_value, help="List of timestamp=value pairs."), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Add multiple custom metric data points for entity, can be provided in file, command line or combination of both + """ + + client = ctx.obj["client"] + + data = { + "series": [] + } + series_data = { + "series": series + } + + if file_input: + data = json.loads("".join([line for line in file_input])) + + if series: + for item in series: + data["series"].append(item) + + r = client.post("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag + "/bulk", data=data) + +@app.command() +def delete( + ctx: typer.Context, + custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + start_date: datetime = typer.Option(None, "--start-date", "-s", help="Start date for the deletion (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + end_date: datetime = typer.Option(None, "--end-date", "-e", help="End date for the deletion (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), +): + """ + Delete custom metric data points for entity + """ + + client = ctx.obj["client"] + + params = { + "startDate": start_date, + "endDate": end_date + } + params = _convert_datetime_to_string(params) + + r = 
client.delete("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, params=params) diff --git a/cortexapps_cli/commands/dependencies.py b/cortexapps_cli/commands/dependencies.py new file mode 100644 index 0000000..e64736b --- /dev/null +++ b/cortexapps_cli/commands/dependencies.py @@ -0,0 +1,232 @@ +from collections import defaultdict +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Dependency commands", no_args_is_help=True) + +# Need a helper function to parse custom_data. +# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: +# AssertionError: List types with complex sub-types are not currently supported +# +# borrowed from https://github.com/fastapi/typer/issues/387 +def _parse_key_value(values): + if values is None: + return "" + result = {} + for value in values: + k, v = value.split('=') + result[k] = v + return result.items() + +@app.command() +def create( + ctx: typer.Context, + callee_tag: str = typer.Option(..., "--callee-tag", "-e", help="The entity tag (x-cortex-tag) for the caller entity (\"to\" entity)"), + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + description: str = typer.Option("", "--description", "-d", help="The description of the dependency"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing dependency metadata; can be passed as stdin with -, example: -f-")] = None, + metadata: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional metadata key=value pairs (only; will be merged with file input"), + method: str = typer.Option(None, "--method", "-m", help="The HTTP method type of the dependency"), + path: str = typer.Option(None, "--path", "-p", help="The path of the dependency") +): + """ + Create dependency from entity + """ + 
+ client = ctx.obj["client"] + + params = { + } + + if file_input: + if description or metadata or method or path: + raise typer.BadParameter("When providing a dependencies input file, do not specify any other dependency event attributes") + data = json.loads("".join([line for line in file_input])) + else: + if method: + params["method"] = method + if path: + params["path"] = path + + data = { + "description": "", + "metadata": { + } + } + + if metadata: + data["metadata"] = dict(metadata) + + if description: + data["description"] = description + + r = client.post("api/v1/catalog/" + caller_tag + "/dependencies/" + callee_tag, data=data, params=params) + print_json(json.dumps(r)) + +@app.command() +def delete_in_bulk( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help=" File containing dependency values to delete; can be passed as stdin with -, example: -f-")] = None, +): + """ + Delete dependencies in bulk, see https://docs.cortex.io/docs/api/delete-dependencies-in-bulk for format of input file + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.delete("api/v1/catalog/dependencies", data=data) + +@app.command() +def add_in_bulk( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help=" File containing dependency values to create or update; can be passed as stdin with -, example: -f-")] = None, +): + """ + Create or update dependencies in bulk, see https://docs.cortex.io/docs/api/create-or-update-dependencies-in-bulk + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/catalog/dependencies", data=data) + +@app.command() +def delete_all( + ctx: typer.Context, + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), +): + """ + 
Deletes any outgoing dependencies that were created via the API from the entity + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/catalog/" + caller_tag + "/dependencies") + +@app.command() +def get_all( + ctx: typer.Context, + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + include_incoming: bool = typer.Option(True, "--include-incoming", "-i", help="Include incoming dependencies"), + include_outgoing: bool = typer.Option(False, "--include-outgoing", "-o", help="Include outgoing dependencies"), + page: int = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int = typer.Option(None, "--page-size", "-z", help="Page size for results"), + prt: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), +): + """ + Retrieve all dependencies for an entity + """ + + params = { + "includeIncoming": include_incoming, + "includeOutgoing": include_outgoing, + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client = ctx.obj["client"] + + client.fetch_or_get("api/v1/catalog/" + caller_tag + "/dependencies", page, prt, params=params) + +@app.command() +def delete( + ctx: typer.Context, + callee_tag: str = typer.Option(..., "--callee-tag", "-e", help="The entity tag (x-cortex-tag) for the caller entity (\"to\" entity)"), + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + method: str = typer.Option(None, "--method", "-m", help="The HTTP method type of the dependency"), + path: str = typer.Option(None, "--path", "-p", help="The path of the dependency") +): + """ + Delete a dependency from an entity + """ + + params = { + "method": method, + "path": path + } + + # remove any params 
that are None + params = {k: v for k, v in params.items() if v is not None} + + client = ctx.obj["client"] + + r = client.delete("api/v1/catalog/" + caller_tag + "/dependencies/" + callee_tag, params=params) + +@app.command() +def get( + ctx: typer.Context, + callee_tag: str = typer.Option(..., "--callee-tag", "-e", help="The entity tag (x-cortex-tag) for the caller entity (\"to\" entity)"), + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + method: str = typer.Option(None, "--method", "-m", help="The HTTP method type of the dependency"), + path: str = typer.Option(None, "--path", "-p", help="The path of the dependency") +): + """ + Retrieve dependency between entities + """ + + params = { + "method": method, + "path": path + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + caller_tag + "/dependencies/" + callee_tag, params=params) + + print_json(data=r) + +@app.command() +def update( + ctx: typer.Context, + callee_tag: str = typer.Option(..., "--callee-tag", "-e", help="The entity tag (x-cortex-tag) for the caller entity (\"to\" entity)"), + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + description: str = typer.Option("", "--description", "-d", help="The description of the dependency"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing dependency metadata; can be passed as stdin with -, example: -f-")] = None, + metadata: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional metadata key=value pairs; will be merged with file input"), + method: str = typer.Option(None, "--method", "-m", help="The HTTP method type of the dependency"), + path: str = typer.Option(None, 
"--path", "-p", help="The path of the dependency") +): + """ + Update dependency between entities + """ + + client = ctx.obj["client"] + + params = { + } + + if method: + params["method"] = method + if path: + params["path"] = path + + data = { + "description": "", + "metadata": { + } + } + + if file_input: + data = json.loads("".join([line for line in file_input])) + + # if metadata provided in file and command line, command line takes precedence + if metadata: + data["metadata"] = data["metadata"] | dict(metadata) + + if description: + data["description"] = description + + r = client.put("api/v1/catalog/" + caller_tag + "/dependencies/" + callee_tag, data=data, params=params) + print_json(data=r) diff --git a/cortexapps_cli/commands/deploys.py b/cortexapps_cli/commands/deploys.py new file mode 100644 index 0000000..e15173a --- /dev/null +++ b/cortexapps_cli/commands/deploys.py @@ -0,0 +1,242 @@ +from collections import defaultdict +from datetime import datetime +from enum import Enum +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Deploys commands", no_args_is_help=True) + +# Need a helper function to parse custom_data. 
+# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: +# AssertionError: List types with complex sub-types are not currently supported +# +# borrowed from https://github.com/fastapi/typer/issues/387 +def _parse_key_value(values): + if values is None: + return "" + result = {} + for value in values: + k, v = value.split('=') + result[k] = v + return result.items() + +class Type(str, Enum): + DEPLOY = "DEPLOY" + SCALE = "SCALE" + ROLLBACK = "ROLLBACK" + RESTART = "RESTART" + +@app.command() +def delete_by_filter( + ctx: typer.Context, + sha: str = typer.Option(None, "--sha", "-s", help="The Secure Hash Algorithm (SHA) of the deploy"), + environment: str = typer.Option(None, "--environment", "-e", help="The name of the environment"), + type: Type = typer.Option(None, "--type", "-ty", help="The type of the deploy"), +): + """ + Filter and delete deploys by SHA hash, environment or type + """ + + client = ctx.obj["client"] + + if not sha and not environment and not type: + raise typer.BadParameter("At least one of sha, environment or type must be provided.") + + params = { + "environment": environment, + "sha": sha, + "type": type + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client.delete("api/v1/catalog/deploys", params=params) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Dangerous endpoint that blows away deploys for all entities + """ + + client = ctx.obj["client"] + + client.delete("api/v1/catalog/deploys/all") + +@app.command() +def delete( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + sha: str = typer.Option(None, "--sha", "-s", help="The Secure Hash Algorithm (SHA) of the deploy"), + environment: str = typer.Option(None, "--environment", "-e", help="The name of the environment"), + type: Type = typer.Option(None, "--type", "-ty", 
help="The type of the deploy"), +): + """ + Delete deployments for entity + """ + + client = ctx.obj["client"] + + params = { + "environment": environment, + "sha": sha, + "type": type + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client.delete("api/v1/catalog/" + tag + "/deploys", params=params) + +# 'list' is a keyword in python; naming the function 'list' will cause problems like this: +# TypeError: 'function' object is not subscriptable +# +# Because of this subsequent line in the file: +# customData: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional custom metadata key=value pairs"), +# +# There is a collision between naming this function 'list' and then expecting to use list as the python built-in. +@app.command("list") +def deploys_list( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + prt: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), +): + """ + List deployments for entity + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client.fetch_or_get("api/v1/catalog/" + tag + "/deploys", page, prt, params=params) + +@app.command() +def add( + ctx: typer.Context, + customData: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional custom metadata key=value pairs"), + email: str = typer.Option(None, "--email", "-m", help="Email address of deployer"), + environment: str = 
typer.Option(None, "--environment", "-e", help="The name of the environment"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing dependency metadata; can be passed as stdin with -, example: -f-")] = None, + name: str = typer.Option(None, "--name", "-n", help="Name of deployer"), + sha: str = typer.Option(None, "--sha", "-s", help="The Secure Hash Algorithm (SHA) of the deploy"), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of the deploy", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + title: str = typer.Option(None, "--title", "-ti", help="The title of the deploy"), + type: Type = typer.Option(None, "--type", "-ty", help="The type of the deploy"), + url: str = typer.Option(None, "--url", help="The Uniform Resource Locator(URL) of the deploy") +): + """ + Add deployment for entity + """ + + client = ctx.obj["client"] + + if file_input: + if email or environment or name or sha or title or type or url: + raise typer.BadParameter("When providing a deploy input file, do not specify any other deploy event attributes") + data = json.loads("".join([line for line in file_input])) + else: + + data = { + "customData": { + }, + "deployer": { + "email": email, + "name": name + }, + "environment": environment, + "sha": sha, + "timestamp": timestamp, + "title": title, + "type": type.value, + "url": url + } + + if customData: + data["customData"] = dict(customData) + data["timestamp"] = data["timestamp"].strftime('%Y-%m-%dT%H:%M:%SZ') + + r = client.post("api/v1/catalog/" + tag + "/deploys", data=data) + print_json(data=r) + +@app.command() +def delete_by_uuid( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + uuid: str = typer.Option(..., "--uuid", "-u", 
help="The Universally Unique Identifier (UUID) of the deploy") +): + """ + Delete deployment by uuid + """ + + client = ctx.obj["client"] + + client.delete("api/v1/catalog/" + tag + "/deploys/" + uuid) + +@app.command() +def update_by_uuid( + ctx: typer.Context, + customData: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional custom metadata key=value pairs"), + email: str = typer.Option(None, "--email", "-m", help="Email address of deployer"), + environment: str = typer.Option(None, "--environment", "-e", help="The name of the environment"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing dependency metadata; can be passed as stdin with -, example: -f-")] = None, + name: str = typer.Option(None, "--name", "-n", help="Name of deployer"), + sha: str = typer.Option(None, "--sha", "-s", help="The Secure Hash Algorithm (SHA) of the deploy"), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of the deploy", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + title: str = typer.Option(None, "--title", "-ti", help="The title of the deploy"), + deploy_type: Type = typer.Option(None, "--type", "-ty", help="The type of the deploy"), + url: str = typer.Option(None, "--url", help="The Uniform Resource Locator(URL) of the deploy"), + uuid: str = typer.Option(..., "--uuid", "-u", help="The Universally Unique Identifier (UUID) of the deploy") +): + """ + Update deployment for entity + """ + + client = ctx.obj["client"] + + if file_input: + if customData or email or environment or name or sha or title or deploy_type or url: + raise typer.BadParameter("When providing a deploy input file, do not specify any other deploy event attributes") + data = json.loads("".join([line for line in file_input])) + else: + + if not 
title or not deploy_type: + raise typer.BadParameter("When not providing a deploy input file, title and type are required") + data = { + "environment": environment, + "sha": sha, + "type": deploy_type.value, + "timestamp": timestamp, + "title": title + } + + # remove any data values that are None + data = {k: v for k, v in data.items() if v is not None} + + if customData: + data["customData"] = dict(customData) + if email or name: + data["deployer"] = {} + if email: + data["deployer"]["email"] = email + if name: + data["deployer"]["name"] = name + data["timestamp"] = data["timestamp"].strftime('%Y-%m-%dT%H:%M:%SZ') + + r = client.put("api/v1/catalog/" + tag + "/deploys/" + uuid, data=data) + print_json(data=r) diff --git a/cortexapps_cli/commands/discovery_audit.py b/cortexapps_cli/commands/discovery_audit.py new file mode 100644 index 0000000..5a4df23 --- /dev/null +++ b/cortexapps_cli/commands/discovery_audit.py @@ -0,0 +1,80 @@ +from collections import defaultdict +from datetime import datetime +from enum import Enum +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +class DiscoveryType(str, Enum): + APM_RESOURCE_NOT_DETECTED = "APM_RESOURCE_NOT_DETECTED" + AWS_RESOURCE_NOT_DETECTED = "AWS_RESOURCE_NOT_DETECTED" + AZURE_RESOURCE_NOT_DETECTED = "AZURE_RESOURCE_NOT_DETECTED" + ECS_RESOURCE_NOT_DETECTED = "ECS_RESOURCE_NOT_DETECTED" + GOOGLE_CLOUD_RESOURCE_NOT_DETECTED = "GOOGLE_CLOUD_RESOURCE_NOT_DETECTED" + NEW_APM_RESOURCE = "NEW_APM_RESOURCE" + NEW_AWS_RESOURCE = "NEW_AWS_RESOURCE" + NEW_AZURE_RESOURCE = "NEW_AZURE_RESOURCE" + NEW_ECS_RESOURCE = "NEW_ECS_RESOURCE" + NEW_GOOGLE_CLOUD_RESOURCE = "NEW_GOOGLE_CLOUD_RESOURCE" + NEW_K8S_RESOURCE = "NEW_K8S_RESOURCE" + NEW_REPOSITORY = "NEW_REPOSITORY" + REPOSITORY_ARCHIVED = "REPOSITORY_ARCHIVED" + REPOSITORY_DELETED = "REPOSITORY_DELETED" + +class DiscoverySource(str, Enum): + AWS = "AWS" + AZURE_DEVOPS = "AZURE_DEVOPS" + AZURE_RESOURCES = "AZURE_RESOURCES" + 
BITBUCKET = "BITBUCKET" + DATADOG = "DATADOG" + DYNATRACE = "DYNATRACE" + ECS = "ECS" + GCP = "GCP" + GITHUB = "GITHUB" + GITLAB = "GITLAB" + INSTANA = "INSTANA" + K8S = "K8S" + LIGHTSTEP = "LIGHTSTEP" + LAMBDA = "LAMBDA" + LAMBDA_CLOUD_CONTROL = "LAMBDA_CLOUD_CONTROL" + NEWRELIC = "NEWRELIC" + SERVICENOW = "SERVICENOW" + SERVICENOW_DOMAIN = "SERVICENOW_DOMAIN" + +app = typer.Typer(help="Discovery Audit commands", no_args_is_help=True) + +@app.command() +def get( + ctx: typer.Context, + include_ignored: bool = typer.Option(False, "--include-ignored", "-ii", help="Include ignore events in result"), + type: DiscoveryType = typer.Option(None, "--type", "-ty", help="The type of audit event"), + source: DiscoverySource = typer.Option(None, "--source", "-s", help="The source of the audit event"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + This report shows you recent changes in your environment that aren't reflected in Cortex, including newly created repositories, services, and resources that we discover from your integrations or which were deleted in the environment but corresponding Cortex entities are still present. 
+ """ + + client = ctx.obj["client"] + + params = { + "includeIgnored": include_ignored, + "page": page, + "pageSize": page_size, + "source": source, + "type": type + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/discovery-audit", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/discovery-audit", params=params) + + print_json(data=r) diff --git a/cortexapps_cli/commands/docs.py b/cortexapps_cli/commands/docs.py new file mode 100644 index 0000000..8ffc27e --- /dev/null +++ b/cortexapps_cli/commands/docs.py @@ -0,0 +1,63 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated +import yaml + +app = typer.Typer(help="Docs commands", no_args_is_help=True) + +@app.command() +def get( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(None, "--name", "-n", help="Name of the OpenAPI spec to return. If you have multiple OpenAPI specs configured for your entity as x-cortex-links, use this parameter to ensure the correct spec is returned. 
If this parameter is not specified, we will return the first OpenAPI spec found."), +): + """ + Get OpenAPI docs for entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("api/v1/catalog/" + tag + "/documentation/openapi", params=params) + + print_json(data=r) + +@app.command() +def update( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing stringified JSON representation of the OpenAPI spec; can be passed as stdin with -, example: -f-")] = None, +): + """ + Update OpenAPI docs for entity + """ + + client = ctx.obj["client"] + + yaml_content = yaml.safe_load("".join([line for line in file_input])) + + data = json.dumps({"spec": "" + str(yaml_content) + ""}) + + r = client.put("api/v1/catalog/" + tag + "/documentation/openapi", data=data) + + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Delete OpenAPI docs for entity + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/catalog/" + tag + "/documentation/openapi") diff --git a/cortexapps_cli/commands/entity_types.py b/cortexapps_cli/commands/entity_types.py new file mode 100644 index 0000000..af758c9 --- /dev/null +++ b/cortexapps_cli/commands/entity_types.py @@ -0,0 +1,129 @@ +from collections import defaultdict +from datetime import datetime +from enum import Enum +import json +from rich import print_json +import typer +from typing_extensions import Annotated +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils 
import print_output_with_context, print_output + +app = typer.Typer(help="Entity Types commands", no_args_is_help=True) + +@app.command() +def list( + ctx: typer.Context, + include_built_in: bool = typer.Option(False, "--include-built-in", "-ib", help="When true, returns the built-in entity types that Cortex provides, such as rds and s3, defaults to false"), + _print: CommandOptions._print = True, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], +): + """ + List entity types, excludes Cortex default types of service, domain, and team + """ + + client = ctx.obj["client"] + + params = { + "includeBuiltIn": include_built_in, + "page": page, + "pageSize": page_size, + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Type=type", + "Source=source", + "Name=name", + "Description=description", + ] + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/definitions", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/definitions", params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) + +@app.command() +def delete( + ctx: typer.Context, + entity_type: str = typer.Option(..., "--type", "-t", help="The entity type"), +): + """ + Delete entity type + """ + + client = ctx.obj["client"] + + client.delete("api/v1/catalog/definitions/" + entity_type) + +@app.command() +def create( + ctx: typer.Context, + file_input: Annotated[typer.FileText, 
typer.Option("--file", "-f", help=" File containing custom entity definition; can be passed as stdin with -, example: -f-")] = None, + force: bool = typer.Option(False, "--force", help="Recreate entity if it already exists."), +): + """ + Create entity type + """ + + client = ctx.obj["client"] + data = json.loads("".join([line for line in file_input])) + + entity_type = data['type'] + entities = list(ctx=ctx, _print=False, include_built_in=False) + + # Check if any definition has type == 'tool-test' + exists = any(entity.get('type') == entity_type for entity in entities.get('definitions', [])) + if entities is None or not exists: + client.post("api/v1/catalog/definitions", data=data) + +@app.command() +def update( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom entity definition; can be passed as stdin with -, example: -f-")] = None, + entity_type: str = typer.Option(..., "--type", "-t", help="The entity type"), +): + """ + Update entity type + """ + + client = ctx.obj["client"] + data = json.loads("".join([line for line in file_input])) + + r = client.update("api/v1/catalog/definitions/" + entity_type, data=data) + +@app.command() +def get( + ctx: typer.Context, + entity_type: str = typer.Option(..., "--type", "-t", help="The entity type"), + _print: CommandOptions._print = True, +): + """ + Retrieve entity type + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/definitions/" + entity_type) + if _print: + print_json(data=r) + else: + return r diff --git a/cortexapps_cli/commands/gitops_logs.py b/cortexapps_cli/commands/gitops_logs.py new file mode 100644 index 0000000..616b6d8 --- /dev/null +++ b/cortexapps_cli/commands/gitops_logs.py @@ -0,0 +1,75 @@ +from enum import Enum +#import json +#from rich import print_json +import typer +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context + +app = 
typer.Typer( + help="GitOps Logs commands", + no_args_is_help=True +) + +class Operation(str, Enum): + ARCHIVED = "ARCHIVED" + CREATED = "CREATED" + NO_CHANGE = "NO_CHANGE" + UPDATED = "UPDATED" + +@app.command() +def get( + ctx: typer.Context, + file: str = typer.Option(None, "--file", "-f", help="File name within the repository"), + file_name: str = typer.Option(None, "--file-name", "-fn", help="File name within the repository; TODO: what is difference with this and file parm?"), + repository: str = typer.Option(None, "--repository", "-r", help="Repository name as defined in your Git provider"), + sha: str = typer.Option(None, "--sha", "-s", help="Commit SHA"), + operation: Operation = typer.Option(None, "--operation", "-o", help="One of CREATED, UPDATED, ARCHIVED, NO_CHANGE"), + error_only: bool = typer.Option(False, "--error-only", "-eo", help="Only include entries with errors"), + #page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + #page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], +): + """ + Retrieve GitOps logs. API key must have the 'View GitOps logs' permission. 
+ """ + + client = ctx.obj["client"] + + params = { + "errorOnly": error_only, + "file": file, + "fileName": file_name, + "operation": operation, + "page": page, + "pageSize": page_size, + "repository": repository, + "sha": sha + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Files=files", + "repositoryName=repository.repositoryName", + "provider=repository.provider", + "Commit=commit", + "Date=dateCreated", + ] + + #prt = True + #client.fetch_or_get("api/v1/gitops-logs", page, prt, params=params) + if page is None: + r = client.fetch("api/v1/gitops-logs", params=params) + else: + r = client.get("api/v1/gitops-logs", params=params) + + print_output_with_context(ctx, r) diff --git a/cortexapps_cli/commands/groups.py b/cortexapps_cli/commands/groups.py new file mode 100644 index 0000000..31a87db --- /dev/null +++ b/cortexapps_cli/commands/groups.py @@ -0,0 +1,87 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Groups commands", no_args_is_help=True) + +@app.command() +def get( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + prt: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), +): + """ + Get groups for entity. 
 + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client.fetch_or_get("api/v1/catalog/" + tag_or_id + "/groups", page, prt, params=params) + +@app.command() +def add( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing keys to update; can be passed as stdin with -, example: -f-")] = None, + groups: str = typer.Option(None, "--groups", "-g", help="Comma-delimited list of groups to add to the entity") +): + """ + Add groups to entity. + """ + + client = ctx.obj["client"] + + if file_input and groups: + raise typer.BadParameter("Only one of --file and --groups can be specified") + + if not file_input and not groups: + raise typer.BadParameter("One of --file or --groups is required") + + if file_input: + data = json.loads("".join([line for line in file_input])) + else: + data = { + "groups": [{"tag": x.strip()} for x in groups.split(',')] + } + + r = client.put("api/v1/catalog/" + tag_or_id + "/groups", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing keys to update; can be passed as stdin with -, example: -f-")] = None, + groups: str = typer.Option(None, "--groups", "-g", help="Comma-delimited list of groups to delete from the entity") +): + """ + Delete groups from entity. 
 + """ + + client = ctx.obj["client"] + + if file_input and groups: + raise typer.BadParameter("Only one of --file and --groups can be specified") + + if not file_input and not groups: + raise typer.BadParameter("One of --file or --groups is required") + + if file_input: + data = json.loads("".join([line for line in file_input])) + else: + data = { + "groups": [{"tag": x.strip()} for x in groups.split(',')] + } + + r = client.delete("api/v1/catalog/" + tag_or_id + "/groups", data=data) diff --git a/cortexapps_cli/commands/initiatives.py b/cortexapps_cli/commands/initiatives.py new file mode 100644 index 0000000..164bcc3 --- /dev/null +++ b/cortexapps_cli/commands/initiatives.py @@ -0,0 +1,121 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output + +app = typer.Typer( + help="Initiatives commands", + no_args_is_help=True +) + +@app.command() +def create( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing JSON body of request, can be passed as stdin with -, example: -f-")] = None, +): + """ + Create an Initiative. API key must have the Edit Initiative permission. + """ + + client = ctx.obj["client"] + data = json.loads("".join([line for line in file_input])) + + client.post("api/v1/initiatives", data=data) + +@app.command() +def update( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing JSON body of request, can be passed as stdin with -, example: -f-")] = None, + cid: str = typer.Option(..., "--cid", "-c", help="Unique Cortex ID for the initiative"), +): + """ + Update an Initiative. API key must have the Edit Initiative permission. 
+ """ + + client = ctx.obj["client"] + data = json.loads("".join([line for line in file_input])) + + client.put("api/v1/initiatives/" + cid, data=data) + +@app.command() +def delete( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="Unique Cortex ID for the initiative"), +): + """ + Delete initiative. API key must have the Edit Initiatives permission. + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/initiatives/" + cid) + +@app.command() +def list( + ctx: typer.Context, + include_drafts: bool = typer.Option(False, "--include-drafts", "-d", help="Whether scorecard in draft mode should be included"), + include_expired: bool = typer.Option(False, "--include-expired", "-e", help="Whether scorecard in draft mode should be included"), + _print: CommandOptions._print = True, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], +): + """ + List initiatives. API key must have the View Initiatives permission. 
+ """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size, + "includeDrafts": include_drafts, + "includeExpired": include_expired + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "CId=cid", + "Name=name", + "Description=description", + "TargetDate=targetDate", + "ScorecardTag=scorecardTag", + "ScorecardName=scorecardName", + "IsDraft=isDraft", + ] + + if page is None: + r = client.fetch("api/v1/initiatives", params=params) + else: + r = client.get("api/v1/initiatives", params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) + +@app.command() +def get( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="Unique Cortex ID for the initiative"), +): + """ + Get initiative. API key must have the View Initiatives permission. + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/initiatives/" + cid) + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations.py b/cortexapps_cli/commands/integrations.py new file mode 100644 index 0000000..5e2f7c7 --- /dev/null +++ b/cortexapps_cli/commands/integrations.py @@ -0,0 +1,35 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +import cortexapps_cli.commands.integrations_commands.aws as aws +import cortexapps_cli.commands.integrations_commands.azure_resources as azure_resources +import cortexapps_cli.commands.integrations_commands.azure_devops as azure_devops +import cortexapps_cli.commands.integrations_commands.circleci as circleci +import cortexapps_cli.commands.integrations_commands.coralogix as coralogix +import cortexapps_cli.commands.integrations_commands.datadog as datadog +import cortexapps_cli.commands.integrations_commands.github as github +import cortexapps_cli.commands.integrations_commands.gitlab 
as gitlab +import cortexapps_cli.commands.integrations_commands.incidentio as incidentio +import cortexapps_cli.commands.integrations_commands.launchdarkly as launchdarkly +import cortexapps_cli.commands.integrations_commands.newrelic as newrelic +import cortexapps_cli.commands.integrations_commands.pagerduty as pagerduty +import cortexapps_cli.commands.integrations_commands.prometheus as prometheus +import cortexapps_cli.commands.integrations_commands.sonarqube as sonarqube + +app = typer.Typer(help="Integrations commands", no_args_is_help=True) +app.add_typer(aws.app, name="aws") +app.add_typer(azure_resources.app, name="azure-resources") +app.add_typer(azure_devops.app, name="azure-devops") +app.add_typer(circleci.app, name="circleci") +app.add_typer(coralogix.app, name="coralogix") +app.add_typer(datadog.app, name="datadog") +app.add_typer(github.app, name="github") +app.add_typer(gitlab.app, name="gitlab") +app.add_typer(incidentio.app, name="incidentio") +app.add_typer(launchdarkly.app, name="launchdarkly") +app.add_typer(newrelic.app, name="newrelic") +app.add_typer(pagerduty.app, name="pagerduty") +app.add_typer(prometheus.app, name="prometheus") +app.add_typer(sonarqube.app, name="sonarqube") diff --git a/cortexapps_cli/commands/integrations_commands/aws.py b/cortexapps_cli/commands/integrations_commands/aws.py new file mode 100644 index 0000000..4f94a29 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/aws.py @@ -0,0 +1,221 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="AWS commands", no_args_is_help=True) + +# Make this a common client function? + +# Need a helper function to parse custom_data. 
# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None)
# Results in:
#   AssertionError: List types with complex sub-types are not currently supported
# so key=value pairs are parsed by hand.
# borrowed from https://github.com/fastapi/typer/issues/387
def _parse_key_value(values):
    """Parse a list of 'accountId=role' strings into AWS configuration dicts."""
    if values is None:
        return []
    result = []
    for value in values:
        account_id, role = value.split('=')
        result.append({"accountId": account_id, "role": role})
    return result

def _parse_key_value_types(values):
    """Parse a list of 'type=enabled' strings into AWS type dicts."""
    if values is None:
        return []
    result = []
    for value in values:
        type_name, enabled = value.split('=')
        result.append({"type": type_name, "enabled": enabled})
    return result

@app.command()
def add(
    ctx: typer.Context,
    account_id: str = typer.Option(..., "--account-id", "-a", help="The account ID for the AWS account"),
    role: str = typer.Option(..., "--role", "-r", help="The IAM role Cortex would be assuming"),
):
    """
    Add a single configuration
    """

    client = ctx.obj["client"]

    data = {
        "accountId": account_id,
        "role": role
    }

    r = client.post("api/v1/aws/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    account_id: str = typer.Option(..., "--account-id", "-a", help="The account ID for the AWS account"),
):
    """
    Delete a configuration
    """

    client = ctx.obj["client"]

    # Bug fix: original referenced undefined name `accountId` (NameError).
    r = client.delete("api/v1/aws/configurations/" + account_id)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """

    client = ctx.obj["client"]

    r = client.delete("api/v1/aws/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    account_id: str = typer.Option(..., "--account-id", "-a", help="The account ID for the AWS account"),
):
    """
    Get a configuration
    """

    client = ctx.obj["client"]

    # Bug fix: original referenced undefined name `accountId` (NameError).
    r = client.get("api/v1/aws/configurations/" + account_id)
    print_json(data=r)

@app.command("list")
def aws_list(
    ctx: typer.Context,
):
    """
    Get all configurations
    """

    client = ctx.obj["client"]

    r = client.get("api/v1/aws/configurations")
    print_json(data=r)


@app.command()
def update(
    ctx: typer.Context,
    configurations: list[str] | None = typer.Option(None, "--configurations", "-c", callback=_parse_key_value, help="List of account=role pairs (only if file input not provided)."),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Update configurations
    """

    client = ctx.obj["client"]

    data = {
        "configurations": []
    }

    if file_input:
        data = json.loads(file_input.read())

    # Configurations given on the command line are appended to any read from
    # the file.  NOTE(review): a file without a "configurations" key would make
    # the append below fail - assumed to match the API schema.
    if configurations:
        for item in configurations:
            data["configurations"].append(item)

    r = client.put("api/v1/aws/configurations", data=data)
    print_json(data=r)

@app.command()
def validate(
    ctx: typer.Context,
    account_id: str = typer.Option(..., "--account-id", "-a", help="The account ID for the AWS account"),
):
    """
    Validate a configuration
    """

    client = ctx.obj["client"]

    # Bug fix: original used undefined `accountId` and omitted the path
    # separator, producing ".../validate<id>".
    r = client.post("api/v1/aws/configurations/validate/" + account_id)
    print_json(data=r)

@app.command()
def validate_all(
    ctx: typer.Context,
):
    """
    Validate all configurations
    """

    client = ctx.obj["client"]

    # NOTE(review): POST to the bare configurations endpoint is the "add"
    # operation; validate-all presumably posts to .../validate - confirm
    # against the Cortex API reference.
    r = client.post("api/v1/aws/configurations/validate")
    print_json(data=r)

@app.command()
def list_types(
    ctx: typer.Context,
    include_disabled: bool = typer.Option(False, "--include-disabled", "-i", help="When true, includes all types supported"),
    page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"),
    page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results")
):
    """
    List AWS types that have been imported
    """

    client = ctx.obj["client"]

    params = {
        "includeDisabled": include_disabled,
        "page": page,
        "pageSize": page_size
    }

    # remove any params that are None
    params = {k: v for k, v in params.items() if v is not None}

    r = client.get("api/v1/aws/types", params=params)
    print_json(data=r)
"page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("api/v1/aws/types", params=params) + print_json(data=r) + +@app.command() +def update_types( + ctx: typer.Context, + types: list[str] | None = typer.Option(None, "--types", "-t", callback=_parse_key_value_types, help="List of type=True|False pairs (only if file input not provided."), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON File containing AWS types that should be discovered and imported into catalog; can be passed as stdin with -, example: -f-")] = None, +): + """ + Update configured AWS types + """ + + client = ctx.obj["client"] + + data = { + "types": [] + } + series_data = { + "types": types + } + + if file_input: + data = json.loads("".join([line for line in file_input])) + else: + if not types: + raise typer.BadParameter("One of --types or --file must be provided.") + + if types: + for item in types: + data["types"].append(item) + + print(data) + r = client.put("api/v1/aws/types", data=data) + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/azure_devops.py b/cortexapps_cli/commands/integrations_commands/azure_devops.py new file mode 100644 index 0000000..510e9ae --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/azure_devops.py @@ -0,0 +1,174 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Azure Devops commands", no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + organization_slug: str = typer.Option(..., "--organization-slug", "-o", help="Identifier 
for organization"), + personal_access_token: str = typer.Option(..., "--pat", "-p", help="Personal Access Token"), + username: str = typer.Option(..., "--username", "-u", help="Username"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or is_default or host or organization_slug or personal_access_token or username: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "host": host, + "isDefault": is_default, + "organizationSlug": organization_slug, + "personalAccessToken": personal_access_token, + "username": username + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/azure-devops/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/azure-devops/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + 
Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/azure-devops/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-devops/configuration/" + alias) + print_json(data=r) + +@app.command() +def list( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-devops/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-devops/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/azure-devops/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-devops/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-devops/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/azure_resources.py 
# --- cortexapps_cli/commands/integrations_commands/azure_resources.py (new file) ---
import json
from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="Azure Resources commands", no_args_is_help=True)

# Typer cannot express list[Tuple[str, str]] option types:
#   AssertionError: List types with complex sub-types are not currently supported
# so key=value pairs are parsed by hand.
# borrowed from https://github.com/fastapi/typer/issues/387
def _parse_key_value(values):
    """Parse 'accountId=role' strings into configuration dicts."""
    if values is None:
        return []
    result = []
    for value in values:
        account_id, role = value.split('=')
        result.append({"accountId": account_id, "role": role})
    return result

def _parse_key_value_types(values):
    """Parse 'type=enabled' strings into type dicts."""
    if values is None:
        return []
    result = []
    for value in values:
        type_name, enabled = value.split('=')
        result.append({"type": type_name, "enabled": enabled})
    return result

@app.command()
def add(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
    host: str = typer.Option(None, "--host", "-h", help="Optional host name"),
    organization_slug: str = typer.Option(..., "--organization-slug", "-o", help="Identifier for organization"),
    personal_access_token: str = typer.Option(..., "--pat", "-p", help="Personal Access Token"),
    username: str = typer.Option(..., "--username", "-u", help="Username"),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add a single configuration
    """
    # NOTE(review): these options (organization slug, PAT, username) appear to
    # be copied from the Azure DevOps integration - confirm the Azure
    # Resources configuration schema against the Cortex API reference.

    client = ctx.obj["client"]

    if file_input:
        if alias or is_default or host or organization_slug or personal_access_token or username:
            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration options")
        data = json.loads(file_input.read())
    else:
        data = {
            "alias": alias,
            "host": host,
            "isDefault": is_default,
            "organizationSlug": organization_slug,
            "personalAccessToken": personal_access_token,
            "username": username
        }

    # remove any data elements that are None - can only be is_default
    data = {k: v for k, v in data.items() if v is not None}

    # Bug fix: original posted to the Azure DevOps endpoint.
    r = client.post("api/v1/azure-resources/configuration", data=data)
    print_json(data=r)

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """

    client = ctx.obj["client"]

    data = json.loads(file_input.read())

    # Bug fix: original PUT to the AWS endpoint.
    r = client.put("api/v1/azure-resources/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Delete a configuration
    """

    client = ctx.obj["client"]

    r = client.delete("api/v1/azure-resources/configuration/" + alias)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """

    client = ctx.obj["client"]

    r = client.delete("api/v1/azure-resources/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Get a configuration
    """

    client = ctx.obj["client"]

    r = client.get("api/v1/azure-resources/configuration/" + alias)
    print_json(data=r)

@app.command("list")
def azure_resources_list(
    ctx: typer.Context,
):
    """
    Get all configurations
    """

    client = ctx.obj["client"]

    r = client.get("api/v1/azure-resources/configurations")
    print_json(data=r)
azure_resources_list( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-resources/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-resources/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/azure-resources/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-resources/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-resources/configurations") + print_json(data=r) + +@app.command() +def list_types( + ctx: typer.Context, + include_disabled: bool = typer.Option(False, "--include-disabled", "-i", help="When true, includes all types supported"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results") +): + """ + List AWS types that have been imported + """ + + client = ctx.obj["client"] + + params = { + "includeDisabled": include_disabled, + "page": page, 
+ "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("api/v1/azure-resources/types", params=params) + print_json(data=r) + +@app.command() +def update_types( + ctx: typer.Context, + types: list[str] | None = typer.Option(None, "--types", "-t", callback=_parse_key_value_types, help="List of type=True|False pairs (only if file input not provided."), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON File containing types that should be discovered and imported into catalog; can be passed as stdin with -, example: -f-")] = None, +): + """ + Update configured Azure Resources types + """ + + client = ctx.obj["client"] + + data = { + "types": [] + } + series_data = { + "types": types + } + + if file_input: + data = json.loads("".join([line for line in file_input])) + else: + if not types: + raise typer.BadParameter("One of --types or --file must be provided.") + + if types: + for item in types: + data["types"].append(item) + + print(data) + r = client.put("api/v1/azure-resources/types", data=data) + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/circleci.py b/cortexapps_cli/commands/integrations_commands/circleci.py new file mode 100644 index 0000000..48ca1f0 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/circleci.py @@ -0,0 +1,170 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="CircleCI commands", no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + file_input: 
Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or host: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/circleci/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/circleci/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/circleci/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r 
= client.get("api/v1/circleci/configuration/" + alias) + print_json(data=r) + +@app.command() +def list( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/circleci/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/circleci/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/circleci/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/circleci/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/circleci/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/coralogix.py b/cortexapps_cli/commands/integrations_commands/coralogix.py new file mode 100644 index 0000000..e23f659 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/coralogix.py @@ -0,0 +1,179 @@ +from enum import Enum +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Coralogix commands", no_args_is_help=True) + +class Region(str, Enum): + US1 = "US1" + 
US2 = "US1" + EU1 = "US1" + EU2 = "US1" + AP1 = "AP1" + AP2 = "AP2" + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + region: Region = typer.Option(..., "--region", "-r", help="Region of configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or region: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "isDefault": is_default, + "region": region + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/coralogix/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = 
client.delete("api/v1/coralogix/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/coralogix/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/coralogix/configuration/" + alias) + print_json(data=r) + +@app.command() +def list( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/coralogix/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/coralogix/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/coralogix/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/coralogix/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/coralogix/configurations") + print_json(data=r) 
# --- cortexapps_cli/commands/integrations_commands/datadog.py (new file) ---
from enum import Enum
import json
from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="Datadog commands", no_args_is_help=True)

class Region(str, Enum):
    US1 = "US1"
    US3 = "US3"
    US5 = "US5"
    US1_FED = "US1_FED"
    EU1 = "EU1"

@app.command()
def add(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
    api_key: str = typer.Option(..., "--api-key", "-api", help="API key"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
    region: Region = typer.Option(..., "--region", "-r", help="Region of configuration"),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add a single configuration
    """

    client = ctx.obj["client"]

    if file_input:
        # Bug fix: original tested and serialized an undefined name `host`
        # (NameError); this command takes --region, not --host.
        # NOTE(review): --api-key and --region are required options, so this
        # branch always raises; consider making them optional when --file is used.
        if alias or api_key or is_default or region:
            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration options")
        data = json.loads(file_input.read())
    else:
        data = {
            "alias": alias,
            "apiKey": api_key,
            "isDefault": is_default,
            "region": region
        }

    # remove any data elements that are None - can only be is_default
    data = {k: v for k, v in data.items() if v is not None}

    r = client.post("api/v1/datadog/configuration", data=data)
    print_json(data=r)

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """

    client = ctx.obj["client"]

    data = json.loads(file_input.read())

    # Bug fix: original PUT to the AWS endpoint.
    r = client.put("api/v1/datadog/configurations", data=data)
    print_json(data=r)
as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/datadog/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/datadog/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/datadog/configuration/" + alias) + print_json(data=r) + +@app.command() +def list( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/datadog/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/datadog/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/datadog/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, 
+ alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/datadog/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/datadog/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/github.py b/cortexapps_cli/commands/integrations_commands/github.py new file mode 100644 index 0000000..da2a4ea --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/github.py @@ -0,0 +1,170 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="GitHub commands", no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or host: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data 
# --- cortexapps_cli/commands/integrations_commands/github.py (continued) ---

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """
    client = ctx.obj["client"]
    data = json.load(file_input)
    # BUG FIX: endpoint previously targeted the AWS integration ("api/v1/aws/configurations").
    r = client.put("api/v1/github/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Delete a configuration
    """
    client = ctx.obj["client"]
    r = client.delete("api/v1/github/configuration/" + alias)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """
    client = ctx.obj["client"]
    r = client.delete("api/v1/github/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Get a configuration
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/github/configuration/" + alias)
    print_json(data=r)

@app.command()
def list(
    ctx: typer.Context,
):
    """
    Get all configurations
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/github/configurations")
    print_json(data=r)

@app.command()
def get_default(
    ctx: typer.Context,
):
    """
    Get default configuration
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/github/default-configuration")
    print_json(data=r)

@app.command()
def update(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
):
    """
    Update a configuration
    """
    client = ctx.obj["client"]
    data = {
        "alias": alias,
        "isDefault": is_default,
    }
    r = client.put("api/v1/github/configuration/" + alias, data=data)
    print_json(data=r)

@app.command()
def validate(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Validate a configuration
    """
    client = ctx.obj["client"]
    # BUG FIX: a "/" was missing before the alias (".../validate<alias>").
    # NOTE(review): confirm the exact validate path against the Cortex API reference.
    r = client.post("api/v1/github/configurations/validate/" + alias)
    print_json(data=r)

@app.command()
def validate_all(
    ctx: typer.Context,
):
    """
    Validate all configurations
    """
    client = ctx.obj["client"]
    # BUG FIX: was POSTing to ".../configurations" (the bulk-add path), not a validate
    # endpoint. NOTE(review): confirm the exact path against the Cortex API reference.
    r = client.post("api/v1/github/configurations/validate")
    print_json(data=r)

# --- cortexapps_cli/commands/integrations_commands/gitlab.py ---
import json

from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="GitLab commands", no_args_is_help=True)

@app.command()
def add(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
    api_key: str = typer.Option(..., "--api-key", "-api", help="API key"),
    host: str = typer.Option(None, "--host", "-h", help="Optional host name"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add a single configuration
    """
    client = ctx.obj["client"]

    if file_input:
        # BUG FIX: error text referred to "custom event definition" (copy-paste).
        if alias or api_key or is_default or host:
            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration attributes")
        data = json.load(file_input)
    else:
        data = {
            "alias": alias,
            "apiKey": api_key,
            "host": host,
            "isDefault": is_default,
        }

    # remove any data elements that are None - can only be host
    data = {k: v for k, v in data.items() if v is not None}

    r = client.post("api/v1/gitlab/configuration", data=data)
    print_json(data=r)
# --- cortexapps_cli/commands/integrations_commands/gitlab.py (continued) ---

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """
    client = ctx.obj["client"]
    data = json.load(file_input)
    # BUG FIX: endpoint previously targeted the AWS integration ("api/v1/aws/configurations").
    r = client.put("api/v1/gitlab/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Delete a configuration
    """
    client = ctx.obj["client"]
    r = client.delete("api/v1/gitlab/configuration/" + alias)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """
    client = ctx.obj["client"]
    r = client.delete("api/v1/gitlab/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Get a configuration
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/gitlab/configuration/" + alias)
    print_json(data=r)

@app.command()
def list(
    ctx: typer.Context,
):
    """
    Get all configurations
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/gitlab/configurations")
    print_json(data=r)

@app.command()
def get_default(
    ctx: typer.Context,
):
    """
    Get default configuration
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/gitlab/default-configuration")
    print_json(data=r)

@app.command()
def update(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
):
    """
    Update a configuration
    """
    client = ctx.obj["client"]
    data = {
        "alias": alias,
        "isDefault": is_default,
    }
    r = client.put("api/v1/gitlab/configuration/" + alias, data=data)
    print_json(data=r)

@app.command()
def validate(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Validate a configuration
    """
    client = ctx.obj["client"]
    # BUG FIX: a "/" was missing before the alias (".../validate<alias>").
    # NOTE(review): confirm the exact validate path against the Cortex API reference.
    r = client.post("api/v1/gitlab/configurations/validate/" + alias)
    print_json(data=r)

@app.command()
def validate_all(
    ctx: typer.Context,
):
    """
    Validate all configurations
    """
    client = ctx.obj["client"]
    # BUG FIX: was POSTing to ".../configurations" (the bulk-add path), not a validate
    # endpoint. NOTE(review): confirm the exact path against the Cortex API reference.
    r = client.post("api/v1/gitlab/configurations/validate")
    print_json(data=r)

# --- cortexapps_cli/commands/integrations_commands/incidentio.py ---
import json

from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="IncidentIO commands", no_args_is_help=True)

@app.command()
def add(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
    api_key: str = typer.Option(..., "--api-key", "-api", help="API key"),
    host: str = typer.Option(None, "--host", "-h", help="Optional host name"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add a single configuration
    """
    client = ctx.obj["client"]

    if file_input:
        # BUG FIX: error text referred to "custom event definition" (copy-paste).
        if alias or api_key or is_default or host:
            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration attributes")
        data = json.load(file_input)
    else:
        data = {
            "alias": alias,
            "apiKey": api_key,
            "host": host,
            "isDefault": is_default,
        }

    # remove any data elements that are None - can only be host
    data = {k: v for k, v in data.items() if v is not None}

    r = client.post("api/v1/incidentio/configuration", data=data)
    print_json(data=r)
"-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or host: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/incidentio/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/incidentio/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/incidentio/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = 
client.get("api/v1/incidentio/configuration/" + alias) + print_json(data=r) + +@app.command() +def list( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/incidentio/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/incidentio/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/incidentio/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/incidentio/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/incidentio/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/launchdarkly.py b/cortexapps_cli/commands/integrations_commands/launchdarkly.py new file mode 100644 index 0000000..eefca99 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/launchdarkly.py @@ -0,0 +1,170 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="LaunchDarkly commands", no_args_is_help=True) + +@app.command() +def add( + ctx: 
# --- cortexapps_cli/commands/integrations_commands/launchdarkly.py (continued) ---

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """
    client = ctx.obj["client"]
    data = json.load(file_input)
    # BUG FIX: endpoint previously targeted the AWS integration ("api/v1/aws/configurations").
    r = client.put("api/v1/launchdarkly/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Delete a configuration
    """
    client = ctx.obj["client"]
    r = client.delete("api/v1/launchdarkly/configuration/" + alias)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """
    client = ctx.obj["client"]
    r = client.delete("api/v1/launchdarkly/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Get a configuration
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/launchdarkly/configuration/" + alias)
    print_json(data=r)

@app.command()
def list(
    ctx: typer.Context,
):
    """
    Get all configurations
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/launchdarkly/configurations")
    print_json(data=r)

@app.command()
def get_default(
    ctx: typer.Context,
):
    """
    Get default configuration
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/launchdarkly/default-configuration")
    print_json(data=r)

@app.command()
def update(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
):
    """
    Update a configuration
    """
    client = ctx.obj["client"]
    data = {
        "alias": alias,
        "isDefault": is_default,
    }
    r = client.put("api/v1/launchdarkly/configuration/" + alias, data=data)
    print_json(data=r)

@app.command()
def validate(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Validate a configuration
    """
    client = ctx.obj["client"]
    # BUG FIX: a "/" was missing before the alias (".../validate<alias>").
    # NOTE(review): confirm the exact validate path against the Cortex API reference.
    r = client.post("api/v1/launchdarkly/configurations/validate/" + alias)
    print_json(data=r)

@app.command()
def validate_all(
    ctx: typer.Context,
):
    """
    Validate all configurations
    """
    client = ctx.obj["client"]
    # BUG FIX: was POSTing to ".../configurations" (the bulk-add path), not a validate
    # endpoint. NOTE(review): confirm the exact path against the Cortex API reference.
    r = client.post("api/v1/launchdarkly/configurations/validate")
    print_json(data=r)
# --- cortexapps_cli/commands/integrations_commands/newrelic.py ---
import json

from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="New Relic commands", no_args_is_help=True)

@app.command()
def add(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
    api_key: str = typer.Option(..., "--api-key", "-api", help="API key"),
    host: str = typer.Option(None, "--host", "-h", help="Optional host name"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add a single configuration
    """
    client = ctx.obj["client"]

    if file_input:
        # BUG FIX: error text referred to "custom event definition" (copy-paste).
        if alias or api_key or is_default or host:
            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration attributes")
        data = json.load(file_input)
    else:
        data = {
            "alias": alias,
            "apiKey": api_key,
            "host": host,
            "isDefault": is_default,
        }

    # remove any data elements that are None - can only be host
    data = {k: v for k, v in data.items() if v is not None}

    r = client.post("api/v1/newrelic/configuration", data=data)
    print_json(data=r)

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """
    client = ctx.obj["client"]
    data = json.load(file_input)
    # BUG FIX: endpoint previously targeted the AWS integration ("api/v1/aws/configurations").
    r = client.put("api/v1/newrelic/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Delete a configuration
    """
    client = ctx.obj["client"]
    r = client.delete("api/v1/newrelic/configuration/" + alias)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """
    client = ctx.obj["client"]
    r = client.delete("api/v1/newrelic/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Get a configuration
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/newrelic/configuration/" + alias)
    print_json(data=r)

@app.command()
def list(
    ctx: typer.Context,
):
    """
    Get all configurations
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/newrelic/configurations")
    print_json(data=r)

@app.command()
def get_default(
    ctx: typer.Context,
):
    """
    Get default configuration
    """
    client = ctx.obj["client"]
    r = client.get("api/v1/newrelic/default-configuration")
    print_json(data=r)

@app.command()
def update(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
):
    """
    Update a configuration
    """
    client = ctx.obj["client"]
    data = {
        "alias": alias,
        "isDefault": is_default,
    }
    r = client.put("api/v1/newrelic/configuration/" + alias, data=data)
    print_json(data=r)

@app.command()
def validate(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Validate a configuration
    """
    client = ctx.obj["client"]
    # BUG FIX: a "/" was missing before the alias (".../validate<alias>").
    # NOTE(review): confirm the exact validate path against the Cortex API reference.
    r = client.post("api/v1/newrelic/configurations/validate/" + alias)
    print_json(data=r)
client.post("api/v1/newrelic/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/newrelic/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/pagerduty.py b/cortexapps_cli/commands/integrations_commands/pagerduty.py new file mode 100644 index 0000000..1774206 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/pagerduty.py @@ -0,0 +1,170 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="PagerDuty commands", no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or host: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/pagerduty/configuration", data=data) + print_json(data=r) + +@app.command() 
+def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/pagerduty/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/pagerduty/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/pagerduty/configuration/" + alias) + print_json(data=r) + +@app.command() +def list( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/pagerduty/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/pagerduty/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, 
+ "isDefault": is_default + } + + r = client.put("api/v1/pagerduty/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/pagerduty/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/pagerduty/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/prometheus.py b/cortexapps_cli/commands/integrations_commands/prometheus.py new file mode 100644 index 0000000..9623b31 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/prometheus.py @@ -0,0 +1,170 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Prometheus commands", no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or host: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line 
in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/prometheus/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/prometheus/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/prometheus/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/prometheus/configuration/" + alias) + print_json(data=r) + +@app.command() +def list( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/prometheus/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/prometheus/default-configuration") + 
print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/prometheus/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/prometheus/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/prometheus/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/sonarqube.py b/cortexapps_cli/commands/integrations_commands/sonarqube.py new file mode 100644 index 0000000..81b160a --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/sonarqube.py @@ -0,0 +1,170 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="SonarQube commands", no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line 
@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple SonarQube configurations.

    Reads a JSON document (file or stdin via -f-) describing the configurations
    and replaces the full set via PUT.
    """

    client = ctx.obj["client"]

    # Slurp the whole input stream; the API expects a single JSON document.
    data = json.loads("".join([line for line in file_input]))

    # BUG FIX: this previously PUT to "api/v1/aws/configurations" — an endpoint
    # copied from the AWS integration module. This is the SonarQube
    # sub-command, so target the SonarQube configurations endpoint.
    r = client.put("api/v1/sonarqube/configurations", data=data)
    print_json(data=r)
@app.command()
def validate(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Validate a single SonarQube configuration identified by alias.
    """

    client = ctx.obj["client"]

    # BUG FIX: the original concatenated the alias without a "/" separator,
    # producing ".../configurations/validate<alias>" (e.g. ".../validatemyalias"),
    # which can never match a route. Insert the missing path separator.
    # NOTE(review): the sibling validate_all command POSTs to the bare
    # "api/v1/sonarqube/configurations" endpoint (same path add_multiple PUTs
    # to) — that looks suspect too; confirm the intended bulk-validate path
    # against the Cortex API reference.
    r = client.post("api/v1/sonarqube/configurations/validate/" + alias)
    print_json(data=r)
"--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + _print: CommandOptions._print = True, +): + """ + Get allowlist of IP addresses & ranges + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if _print: + r = client.fetch_or_get("api/v1/ip-allowlist", page, _print, params=params) + else: + r = client.fetch_or_get("api/v1/ip-allowlist", page, _print, params=params) + if r is None: + data = { + "entries": [] + } + else: + data = { + "entries": r['entries'] + } + return json.dumps(data, indent=2) + +@app.command() +def replace( + ctx: typer.Context, + addresses: str = typer.Option(None, "--address", "-a", help="Comma-delimited list of IP addresses and/or IP ranges of form ipAddress[:description], for example 127.0.0.1:'my local IP'"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None, + force: bool = typer.Option(False, "--force", "-o", help="When true, entries will be updated even if the list doesn't contain the requestor's IP address"), + _print: CommandOptions._print = True, +): + """ + Replace existing allowlist with provided list of IP addresses & ranges + """ + + client = ctx.obj["client"] + if not addresses and not file_input: + raise typer.BadParameter("One of --addresses and --file-input is required") + + if file_input: + data = json.loads("".join([line for line in file_input])) + else: + data = { + "entries": [{"address": x.split(':')[0], "description": None if len(x.split(':')) < 2 else x.split(':')[1]} for x in addresses.split(',')] + } + + params = { + "force": force, + } + + r = client.put("api/v1/ip-allowlist", data=data, params=params) + + if _print: + 
print_json(data=r) + else: + return(r) + + +@app.command() +def validate( + ctx: typer.Context, + addresses: str = typer.Option(..., "--address", "-a", help="Comma-delimited list of IP addresses and/or IP ranges of form ipAddress[:description], for example 127.0.0.1:'my local IP'") +): + """ + Validates allowlist of IP addresses & ranges + """ + + client = ctx.obj["client"] + + data = { + "entries": [{"address": x.split(':')[0], "description": None if len(x.split(':')) < 2 else x.split(':')[1]} for x in addresses.split(',')] + } + + r = client.post("api/v1/ip-allowlist/validate", data=data) + + print_json(data=r) + +@app.command() +def remove_all( + ctx: typer.Context, +): + """ + Remove all entries from allowlist + """ + + client = ctx.obj["client"] + + data = { + "entries": [] + } + + r = client.put("api/v1/ip-allowlist", data=data) + + print_json(data=r) diff --git a/cortexapps_cli/commands/on_call.py b/cortexapps_cli/commands/on_call.py new file mode 100644 index 0000000..f5504a6 --- /dev/null +++ b/cortexapps_cli/commands/on_call.py @@ -0,0 +1,33 @@ +import typer +import json +from rich import print_json + +app = typer.Typer(help="On Call commands", no_args_is_help=True) + +@app.command() +def get( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") +): + """ + Retrieve current on-call for entity + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag_or_id + "/integrations/oncall/current") + print_json(data=r) + +@app.command() +def get_registration( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") +): + """ + Retrieve on-call registration for entity + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag_or_id + "/integrations/oncall/registration") + print_json(data=r) diff --git 
a/cortexapps_cli/commands/packages.py b/cortexapps_cli/commands/packages.py new file mode 100644 index 0000000..06707cf --- /dev/null +++ b/cortexapps_cli/commands/packages.py @@ -0,0 +1,72 @@ +import json +from rich import print_json +import typer +import cortexapps_cli.commands.packages_commands.go as go +import cortexapps_cli.commands.packages_commands.java as java +import cortexapps_cli.commands.packages_commands.python as python +import cortexapps_cli.commands.packages_commands.node as node +import cortexapps_cli.commands.packages_commands.nuget as nuget +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output + +app = typer.Typer( + help="Packages commands", + no_args_is_help=True +) + +app.add_typer(go.app, name="go") +app.add_typer(java.app, name="java") +app.add_typer(python.app, name="python") +app.add_typer(node.app, name="node") +app.add_typer(nuget.app, name="nuget") + +@app.command() +def list( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], +): + """ + List packages for entity + """ + + client = ctx.obj["client"] + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Id=id", + "PackageType=packageType", + "Name=name", + "Version=version", + "DateCreated=dateCreated", + ] + + # NOTE: packages list is not paginated, so no if-else that includes client.fetch. 
+ r = client.get("api/v1/catalog/" + tag_or_id + "/packages") + print_output_with_context(ctx, r) + +@app.command() +def delete_all( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Delete all packages for entity + """ + + client = ctx.obj["client"] + + response = client.get("api/v1/catalog/" + tag_or_id + "/packages") + for package in response: + name = package['name'] + package_type = package['packageType'] + if package_type == "NUGET": + package_path = "dotnet/nuget" + else: + package_path = package_type.lower() + client.delete("api/v1/catalog/" + tag_or_id + "/packages/" + package_path, params={"name": name}) diff --git a/cortexapps_cli/commands/packages_commands/go.py b/cortexapps_cli/commands/packages_commands/go.py new file mode 100644 index 0000000..015a001 --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/go.py @@ -0,0 +1,37 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Go commands") + +@app.command() +def upload( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of go.sum; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload go.sum package + """ + + client = ctx.obj["client"] + + #client.post("api/v1/catalog/" + tag_or_id + "/packages/go/gosum", data=package_input.read(), content_type='application/text') + client.post("api/v1/catalog/" + tag_or_id + "/packages/go/gosum", data=package_input.read()) + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", 
help="The name of the package to delete"), +): + """ + Delete go package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/go", params=params) diff --git a/cortexapps_cli/commands/packages_commands/java.py b/cortexapps_cli/commands/packages_commands/java.py new file mode 100644 index 0000000..91beec7 --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/java.py @@ -0,0 +1,51 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Java commands") + +@app.command() +def upload_single( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing package name and version; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload single Java package + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/java", data=package_input.read()) + +@app.command() +def upload_multiple( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing multiple package names and versions; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload multiple Java packages + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/java/bulk", data=package_input.read()) + + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", help="The name of the package to delete"), +): + """ + 
Delete Java package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/java", params=params) diff --git a/cortexapps_cli/commands/packages_commands/node.py b/cortexapps_cli/commands/packages_commands/node.py new file mode 100644 index 0000000..6e592f7 --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/node.py @@ -0,0 +1,65 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Node commands") + +@app.command() +def upload_package_json( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of package.json; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload node package.json file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/node/package-json", data=package_input.read()) + +@app.command() +def upload_package_lock( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of package.lock; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload node package-lock.json file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/node/package-lock", data=package_input.read()) + +@app.command() +def upload_yarn_lock( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of yarn.lock; can 
be passed as stdin with -, example: -f-")] = None, +): + """ + Upload node yarn.lock file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/node/yarn-lock", data=package_input.read()) + + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", help="The name of the package to delete"), +): + """ + Delete node package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/node", params=params) diff --git a/cortexapps_cli/commands/packages_commands/nuget.py b/cortexapps_cli/commands/packages_commands/nuget.py new file mode 100644 index 0000000..6b745ef --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/nuget.py @@ -0,0 +1,50 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="NuGet commands") + +@app.command() +def upload_csproj( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of NuGet csproj file; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload NuGet csproj file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/dotnet/nuget/csproj", data=package_input.read()) + +@app.command() +def upload_packages_lock( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of NuGet packages.lock; can be passed as stdin with -, 
example: -f-")] = None, +): + """ + Upload NuGet packages.lock file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/dotnet/nuget/packages-lock", data=package_input.read()) + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", help="The name of the package to delete"), +): + """ + Delete NuGet package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/dotnet/nuget", params=params) diff --git a/cortexapps_cli/commands/packages_commands/python.py b/cortexapps_cli/commands/packages_commands/python.py new file mode 100644 index 0000000..6efac78 --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/python.py @@ -0,0 +1,51 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Python commands") + +@app.command() +def upload_pipfile( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of pipfile.lock; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload python pipfile.lock file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/python/pipfile", data=package_input.read(), content_type='application/text') + +@app.command() +def upload_requirements( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of requirements.txt; can 
be passed as stdin with -, example: -f-")] = None, +): + """ + Upload python requirements.txt file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/python/requirements", data=package_input.read(), content_type='application/text') + + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", help="The name of the package to delete"), +): + """ + Delete python package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/python", params=params) diff --git a/cortexapps_cli/commands/plugins.py b/cortexapps_cli/commands/plugins.py new file mode 100644 index 0000000..65d8343 --- /dev/null +++ b/cortexapps_cli/commands/plugins.py @@ -0,0 +1,153 @@ +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output +from rich import print_json +from typing_extensions import Annotated +import json +import typer +import re +from urllib.error import HTTPError + +app = typer.Typer( + help="Plugins commands", + no_args_is_help=True +) + +@app.command() +def list( + ctx: typer.Context, + include_drafts: bool = typer.Option(False, "--include-drafts", "-i", help="Also include plugins that are in draft mode"), + _print: CommandOptions._print = True, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], +): + """ + Retrieve 
a list of all plugins, excluding drafts + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Name=name", + "Tag=tag", + "Description=description", + ] + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/plugins", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/plugins", params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) + +@app.command() +def create( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of plugin using schema defined at https://docs.cortex.io/docs/api/create-plugin")] = None, + force: bool = typer.Option(False, "--force", help="Recreate entity if it already exists."), +): + """ + Create a new plugin + """ + + client = ctx.obj["client"] + + data = json.loads(file_input.read()) + + if force: + plugins = list(ctx, _print=False) + plugin_tags = [plugin["tag"] for plugin in plugins["plugins"]] + + tag = data['tag'] + if tag in plugin_tags: + # Remove the 'tag' attribute if it exists + data.pop("tag", None) + r = client.put("api/v1/plugins/" + tag, data, raw_response=True) + else: + r = client.post("api/v1/plugins", data, raw_response=True) + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") +): + """ + Delete a plugin by tag + """ + + client = ctx.obj["client"] + + client.delete("api/v1/plugins/" + tag_or_id) + +@app.command() +def get( + ctx: 
typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + include_blob: bool = typer.Option(False, "--include-blob", "-i", help="When true, returns the plugin blob. Defaults to false."), + _print: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), +): + """ + Retrieve the metadata of a plugin by tag + """ + + client = ctx.obj["client"] + + params = { + "includeBlob": include_blob, + } + + r = client.get("api/v1/plugins/" + tag_or_id, params=params) + if _print: + print_json(data=r) + else: + # Optionally replace raw newlines inside known problem keys + #fixed = str(r).replace('\n', '\\n') # crude but often works + + #data = json.loads(fixed) + #return(json.dumps(data, indent=2)) + #raw_text = r.text + + # Replace unescaped newlines inside string values with escaped \n + # WARNING: This is a heuristic and assumes newlines only appear in strings + #safe_text = re.sub(r'(? 
dict: + if option is None: + return {} + try: + return dict([param.split('=') for param in option]) + except: + raise typer.BadParameter("Invalid parameter format, use Name=value") + +def guess_content_type(data: str) -> str: + try: + json.loads(data) + return 'application/json' + except json.JSONDecodeError: + try: + yaml_data = yaml.safe_load(data) + if isinstance(yaml_data, dict): + if 'openapi' in yaml_data: + return 'application/openapi;charset=utf-8' + return 'application/yaml' + return 'text/plain' + except yaml.YAMLError: + return 'text/plain' + +class RawCommandOptions: + endpoint = typer.Option(..., "--endpoint", "-e", help="API endpoint", show_default=False) + headers = Annotated[ + Optional[List[str]], + typer.Option("--headers", "-H", help="Headers to include in the request, in the format HeaderName=value", show_default=False) + ] + params = Annotated[Optional[List[str]], typer.Option("--params", "-P", help="Parameters to include in the request, in the format ParamName=value", show_default=False)] + input_file = Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the request body from, use - for stdin")] + content_type = typer.Option(None, "--content-type", "-c", help="Content type of the request body (leave blank to guess)") + + +@app.command() +def get( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], +): + """ + Make a GET request to the API + """ + req_headers = parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + r = client.get(endpoint, headers=req_headers, params=req_params) + print_json(data=r) + +@app.command() +def fetch( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], +): + """ + Make a GET request to the API, and automatically fetch all pages + """ + 
req_headers = parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + r = client.fetch(endpoint, headers=req_headers, params=req_params) + print_json(json.dumps(r)) + +@app.command() +def delete( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], +): + """ + Make a DELETE request to the API + """ + req_headers = parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + r = client.delete(endpoint, headers=req_headers, params=req_params) + if (r): + print_json(json.dumps(r)) + +@app.command() +def post( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], + content_type: str = RawCommandOptions.content_type, + input: RawCommandOptions.input_file = '-' +): + """ + Make a POST request to the API + """ + req_headers = parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + data = "".join([line for line in input]) + content_type = content_type or guess_content_type(data) + r = client.post(endpoint, headers=req_headers, params=req_params, data=data, raw_body=True, content_type=content_type) + if input == sys.stdin and sys.stdin.isatty() and sys.stdout.isatty(): + print("") + print_json(json.dumps(r)) + +@app.command() +def put( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], + content_type: str = RawCommandOptions.content_type, + input: RawCommandOptions.input_file = '-' +): + """ + Make a PUT request to the API + """ + req_headers = parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + data = "".join([line for line in input]) + content_type = 
content_type or guess_content_type(data) + r = client.put(endpoint, headers=req_headers, params=req_params, data=data, raw_body=True, content_type=content_type) + if input == sys.stdin and sys.stdin.isatty() and sys.stdout.isatty(): + print("") + print_json(json.dumps(r)) diff --git a/cortexapps_cli/commands/scim.py b/cortexapps_cli/commands/scim.py new file mode 100644 index 0000000..b569115 --- /dev/null +++ b/cortexapps_cli/commands/scim.py @@ -0,0 +1,77 @@ +from enum import Enum +import json +from rich import print_json +import typer +import urllib.parse + +app = typer.Typer(help="SCIM commands", no_args_is_help=True) + +# As of November 2024, sortBy and sortOrder are not supported in our code and result in a 501 error +# Not sure how domain is supposed to be used so leaving it out too +# Couldn't get patch, delete and add to work so leaving them out until I can do further research +@app.command() +def list( + ctx: typer.Context, + attributes: str = typer.Option(None, "--attributes", "-a", help="Comma-separated list of attributes to include in response; example: name.familyName,active"), + count: int | None = typer.Option(None, "--count", "-c", help="Return only the first 'count' results"), + excluded_attributes: str = typer.Option(None, "--excluded-attributes", "-e", help="Comma-separated list of attributes to exclude from response; example: name.givenName,emails"), + filter: str = typer.Option(None, "--filter", "-f", help="Filtering only supported for userName, example: 'userName eq anish@cortex.io'"), + start_index: int | None = typer.Option(None, "--start-index", "-s", help="Return items starting with index number, indexing starts with 1") +): + """ + Get users based on provided criteria + """ + + client = ctx.obj["client"] + + params = { + "attributes": attributes, + "excludedAttributes": excluded_attributes, + "filter": filter, + "startIndex": start_index, + "count": count + } + + # remove any params that are None + params = {k: v for k, v in 
params.items() if v is not None} + + r = client.get("scim/v2/Users", params=urllib.parse.urlencode(params)) + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + attributes: str = typer.Option(None, "--attributes", "-a", help="Comma-separated list of attributes to include in response; example: name.familyName,active"), + excluded_attributes: str = typer.Option(None, "--excluded-attributes", "-e", help="Comma-separated list of attributes to exclude from response; example: name.givenName,emails"), + id: str = typer.Option(..., "--id", "-i", help="SCIM id of user to get"), +): + """ + Gets a user based on id + """ + + client = ctx.obj["client"] + + params = { + "attributes": attributes, + "excludedAttributes": excluded_attributes + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("scim/v2/Users/" + id, params=urllib.parse.urlencode(params)) + print_json(data=r) + +# I get a 403 when testing this in my environment, but leaving in because it's syntactically correct +@app.command() +def delete( + ctx: typer.Context, + id: str = typer.Option(..., "--id", "-i", help="SCIM id of user to delete"), +): + """ + Delete a user based on id + """ + + client = ctx.obj["client"] + + r = client.delete("scim/v2/Users/" + id) diff --git a/cortexapps_cli/commands/scorecards.py b/cortexapps_cli/commands/scorecards.py new file mode 100644 index 0000000..0276465 --- /dev/null +++ b/cortexapps_cli/commands/scorecards.py @@ -0,0 +1,192 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output + +import cortexapps_cli.commands.scorecards_commands.exemptions as exemptions + +app = typer.Typer( + help="Scorecards commands", + no_args_is_help=True +) 
import json
from rich import print_json
import typer
from typing_extensions import Annotated
from cortexapps_cli.command_options import CommandOptions
from cortexapps_cli.command_options import ListCommandOptions
from cortexapps_cli.utils import print_output_with_context, print_output

import cortexapps_cli.commands.scorecards_commands.exemptions as exemptions

app = typer.Typer(
    help="Scorecards commands",
    no_args_is_help=True
)

app.add_typer(exemptions.app, name="exemptions")

@app.command()
def create(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing YAML representation of scorecard, can be passed as stdin with -, example: -f-")] = None,
    dry_run: bool = typer.Option(False, "--dry-run", "-d", help="When true, this endpoint only validates the descriptor contents and returns any errors or warnings"),
):
    """
    Create or update a Scorecard using the descriptor YAML. The operation is determined by the existence of a Scorecard with the same tag as passed in the descriptor.
    """

    client = ctx.obj["client"]

    params = {
        "dryRun": dry_run
    }

    # remove any params that are None
    params = {k: v for k, v in params.items() if v is not None}

    # Descriptor is posted verbatim as YAML; the API validates it server-side.
    client.post("api/v1/scorecards/descriptor", params=params, data=file_input.read(), content_type="application/yaml;charset=UTF-8")

@app.command()
def delete(
    ctx: typer.Context,
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
):
    """
    Delete scorecard
    """

    client = ctx.obj["client"]

    client.delete("api/v1/scorecards/" + scorecard_tag)

@app.command()
def list(
    ctx: typer.Context,
    show_drafts: bool = typer.Option(False, "--show-drafts", "-s", help="Whether scorecard in draft mode should be included"),
    _print: CommandOptions._print = True,
    page: ListCommandOptions.page = None,
    page_size: ListCommandOptions.page_size = 250,
    table_output: ListCommandOptions.table_output = False,
    csv_output: ListCommandOptions.csv_output = False,
    columns: ListCommandOptions.columns = [],
    no_headers: ListCommandOptions.no_headers = False,
    filters: ListCommandOptions.filters = [],
    sort: ListCommandOptions.sort = [],
):
    """
    List scorecards
    """

    client = ctx.obj["client"]

    params = {
        "page": page,
        "pageSize": page_size,
        "showDrafts": show_drafts
    }

    # remove any params that are None
    params = {k: v for k, v in params.items() if v is not None}

    # Default column set for --table/--csv when the user did not choose one.
    if (table_output or csv_output) and not ctx.params.get('columns'):
        ctx.params['columns'] = [
            "Name=name",
            "Tag=tag",
            "Description=description",
            "IsDraft=isDraft",
        ]

    if page is None:
        # if page is not specified, we want to fetch all pages
        r = client.fetch("api/v1/scorecards", params=params)
    else:
        # if page is specified, we want to fetch only that page
        r = client.get("api/v1/scorecards", params=params)

    if _print:
        print_output_with_context(ctx, r)
    else:
        return r

@app.command()
def shield(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
):
    """
    Retrieve scorecard shields.io badge
    """

    client = ctx.obj["client"]

    r = client.get("api/v1/scorecards/" + scorecard_tag + "/entity/" + tag_or_id + "/badge")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
):
    """
    Get scorecard
    """

    client = ctx.obj["client"]

    r = client.get("api/v1/scorecards/" + scorecard_tag)
    print_json(data=r)

@app.command()
def descriptor(
    ctx: typer.Context,
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    _print: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True),
):
    """
    Get scorecards YAML descriptor
    """

    client = ctx.obj["client"]

    # Descriptor is YAML text, not JSON, so it is printed raw.
    r = client.get("api/v1/scorecards/" + scorecard_tag + "/descriptor")
    if _print:
        print(r)
    else:
        return r

@app.command()
def next_steps(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
):
    """
    Retrieve next steps for entity in scorecard
    """

    client = ctx.obj["client"]

    params = {
        "entityTag": tag_or_id
    }

    r = client.get("api/v1/scorecards/" + scorecard_tag + "/next-steps", params=params)
    print_json(data=r)

@app.command()
def scores(
    ctx: typer.Context,
    tag_or_id: str | None = typer.Option(None, "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    # FIX: default was 0, but fetch_or_get only fetches all pages when page is
    # None — so the documented "omit to fetch all pages" behavior was
    # unreachable. Default None matches the `list` command's convention.
    page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"),
    page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"),
    _print: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True),
):
    """
    Return latest scores for all entities in the Scorecard
    """

    client = ctx.obj["client"]

    params = {
        "entityTag": tag_or_id,
        "page": page,
        "pageSize": page_size
    }

    # remove any params that are None
    params = {k: v for k, v in params.items() if v is not None}

    client.fetch_or_get("api/v1/scorecards/" + scorecard_tag + "/scores", page, _print, params=params)
import json
from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="Exemptions commands", no_args_is_help=True)

def _exemption_url(scorecard_tag, tag_or_id):
    # Base endpoint shared by all exemption operations for one entity.
    return f"api/v1/scorecards/{scorecard_tag}/entity/{tag_or_id}/exemption"

@app.command()
def request(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    days: int = typer.Option(0, "--days", "-d", help="Number of days that rule should be exempt. If not set, rule will be exempt until revoked."),
    reason: str = typer.Option(..., "--reason", "-r", help="Reason for creating exemption"),
    rule_identifier: str = typer.Option(..., "--rule-id", "-ri", help="Identifier of the Scorecard rule to request exemption for"),
):
    """
    Request Scorecard rule exemption
    """

    client = ctx.obj["client"]

    payload = {
        "days": days,
        "reason": reason,
        "ruleIdentifier": rule_identifier,
    }

    result = client.post(_exemption_url(scorecard_tag, tag_or_id), data=payload)
    print_json(data=result)

@app.command()
def approve(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    rule_identifier: str = typer.Option(..., "--rule-id", "-ri", help="Identifier of the Scorecard rule to request exemption for"),
):
    """
    Approve Scorecard rule exemption
    """

    client = ctx.obj["client"]

    payload = {"ruleIdentifier": rule_identifier}

    result = client.put(_exemption_url(scorecard_tag, tag_or_id) + "/approve", data=payload)
    print_json(data=result)

@app.command()
def deny(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    rule_identifier: str = typer.Option(..., "--rule-id", "-ri", help="Identifier of the Scorecard rule to request exemption for"),
    reason: str = typer.Option(..., "--reason", "-r", help="Reason for creating exemption"),
):
    """
    Deny Scorecard rule exemption
    """

    client = ctx.obj["client"]

    payload = {
        "ruleIdentifier": rule_identifier,
        "reason": reason,
    }

    result = client.put(_exemption_url(scorecard_tag, tag_or_id) + "/deny", data=payload)
    print_json(data=result)

@app.command()
def revoke(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    reason: str = typer.Option(..., "--reason", "-r", help="Reason for creating exemption"),
    rule_identifier: str = typer.Option(..., "--rule-id", "-ri", help="Identifier of the Scorecard rule to request exemption for"),
):
    """
    Revoke Scorecard rule exemption
    """

    client = ctx.obj["client"]

    payload = {
        "reason": reason,
        "ruleIdentifier": rule_identifier,
    }

    result = client.put(_exemption_url(scorecard_tag, tag_or_id) + "/revoke", data=payload)
    print_json(data=result)
from typing import Optional
from typing_extensions import Annotated
import typer
import json
from rich import print, print_json
from enum import Enum

from cortexapps_cli.models.team import Team
from cortexapps_cli.command_options import ListCommandOptions
from cortexapps_cli.utils import print_output_with_context

app = typer.Typer(help="Teams commands")

class TeamType(str, Enum):
    # Mirrors the API's team "type" discriminator.
    CORTEX = "CORTEX"
    IDP = "IDP"

@app.command()
def create(
    ctx: typer.Context,
    team_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the team definition from")] = None,
    tag: str = typer.Option(None, "--tag", "-t", help="Team tag"),
    type: TeamType = typer.Option(TeamType.CORTEX, "--type", "-y", help="Team type"),
    name: str = typer.Option(None, "--name", "-n", help="Team name"),
    description: str = typer.Option(None, "--description", "-d", help="Team description"),
    summary: str = typer.Option(None, "--summary", "-s", help="Team summary"),
    idp_group: str = typer.Option(None, "--idp-group", "-g", help="IDP group - required when type is IDP"),
    idp_provider: str = typer.Option(None, "--idp-provider", "-p", help="IDP provider - required when type is IDP"),
):
    """
    Create a team

    Use -f to specify a file to read the team definition from (use -f - to read from stdin.) Or, provide the team attributes directly via the command options.
    """
    client = ctx.obj["client"]
    if team_input:
        # A definition file is exclusive with the individual attribute options.
        if tag or name or description or summary or idp_group or idp_provider:
            raise typer.BadParameter("When providing a team definition file, do not specify any other team attributes")
        data = json.load(team_input)
    else:
        if not tag:
            raise typer.BadParameter("tag is required if team definition is not provided")
        if not name:
            raise typer.BadParameter("name is required if team definition is not provided")

        data = {
            "type": type,
            "teamTag": tag,
            "links": [],
            "metadata": {
                "name": name,
            },
            "slackChannels": [],
            "cortexTeam": {
                "members": []
            },
        }

        if description:
            data["metadata"]["description"] = description

        if summary:
            data["metadata"]["summary"] = summary

        if type == TeamType.IDP:
            # IDP-backed teams must reference the identity-provider group.
            if not idp_group:
                raise typer.BadParameter("idp-group is required when type is IDP")
            if not idp_provider:
                raise typer.BadParameter("idp-provider is required when type is IDP")
            data["idpGroup"] = {
                "group": idp_group,
                "provider": idp_provider,
            }

    r = client.post("api/v1/teams", data=data)
    print_json(json.dumps(r))

@app.command()
def list(
    ctx: typer.Context,
    include_teams_without_members: bool = typer.Option(False, "--include-teams-without-members", help="Include teams without members"),
    page: ListCommandOptions.page = None,
    page_size: ListCommandOptions.page_size = 250,
    table_output: ListCommandOptions.table_output = False,
    csv_output: ListCommandOptions.csv_output = False,
    columns: ListCommandOptions.columns = [],
    filters: ListCommandOptions.filters = [],
    sort: ListCommandOptions.sort = [],
):
    """
    List teams
    """
    # NOTE: previous help text claimed a team tag could be passed to list a
    # single team, but no such option exists; use `teams get` for one team.
    client = ctx.obj["client"]

    # Default column set for --table/--csv when the user did not choose one.
    if (table_output or csv_output) and not ctx.params.get('columns'):
        ctx.params['columns'] = [
            "ID=id",
            "Tag=teamTag",
            "Name=metadata.name",
            "Type=type",
        ]

    params = {
        "includeTeamsWithoutMembers": include_teams_without_members,
    }
    r = client.get("api/v1/teams", params=params)
    print_output_with_context(ctx, r)

@app.command()
def get(
    ctx: typer.Context,
    team_tag: str = typer.Option(..., "--team-tag", "-t", help="Team tag"),
):
    """
    Get a team
    """
    client = ctx.obj["client"]
    r = client.get_entity(team_tag, 'team')
    print_json(json.dumps(r))

@app.command()
def delete(
    ctx: typer.Context,
    team_tag: str = typer.Option(..., "--team-tag", "-t", help="Team tag"),
):
    """
    Delete a team
    """
    client = ctx.obj["client"]
    client.delete_entity(team_tag, 'team')

@app.command()
def archive(
    ctx: typer.Context,
    team_tag: str = typer.Option(..., "--team-tag", "-t", help="Team tag"),
):
    """
    Archive a team
    """
    client = ctx.obj["client"]
    r = client.archive_entity(team_tag, 'team')
    print_json(json.dumps(r))

@app.command()
def unarchive(
    ctx: typer.Context,
    team_tag: str = typer.Option(..., "--team-tag", "-t", help="Team tag"),
):
    """
    Unarchive a team
    """
    client = ctx.obj["client"]
    r = client.unarchive_entity(team_tag, 'team')
    print_json(json.dumps(r))

@app.command()
def update(
    ctx: typer.Context,
    team_tag: str = typer.Option(..., "--team-tag", "-t", help="The tag of the team to update"),
    team_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the team definition from")] = None,
    name: str = typer.Option(None, "--name", "-n", help="Team name"),
    description: str = typer.Option(None, "--description", "-d", help="Team description"),
    summary: str = typer.Option(None, "--summary", "-s", help="Team summary"),
):
    """
    Update team
    """
    client = ctx.obj["client"]
    if team_input:
        if name or description or summary:
            raise typer.BadParameter("When providing a team definition file, do not specify any other team attributes")
        team = Team.from_json(team_input.read())
    else:
        # No file: fetch the current definition and patch the given fields.
        team = Team.from_obj(client.get(f"api/v1/teams/{team_tag}"))
        if name:
            team.metadata.name = name
        if description:
            team.metadata.description = description
        if summary:
            team.metadata.summary = summary
    r = client.put(f"api/v1/teams/{team_tag}", data=team.to_obj())
    print_json(json.dumps(r))

@app.command("update-metadata")
def update_metadata(
    ctx: typer.Context,
    team_tag: str = typer.Option(..., "--team-tag", "-t", help="The tag of the team to update"),
    team_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the team definition from")] = None,
):
    """
    Update team metadata
    """

    # all the update methods seem to do the same thing when reading from a file
    update(ctx, team_tag=team_tag, team_input=team_input, name=None, description=None, summary=None)

@app.command("update-members")
def update_members(
    ctx: typer.Context,
    team_tag: str = typer.Option(..., "--team-tag", "-t", help="The tag of the team to update"),
    team_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the team definition from")] = None,
):
    """
    Update team members
    """

    # all the update methods seem to do the same thing when reading from a file
    update(ctx, team_tag=team_tag, team_input=team_input, name=None, description=None, summary=None)
from cortexapps_cli.command_options import CommandOptions
from cortexapps_cli.command_options import ListCommandOptions
from cortexapps_cli.utils import print_output_with_context, print_output
from typing_extensions import Annotated
import json
import typer
import yaml

app = typer.Typer(
    help="Workflows commands",
    no_args_is_help=True
)

def _is_valid_yaml(filepath):
    # Probe-parse the stream, then rewind so the caller can re-read it.
    try:
        yaml.safe_load(filepath)
        filepath.seek(0)
        return True
    except yaml.YAMLError:
        return False

def _is_valid_json(filepath):
    # Probe-parse the stream, then rewind so the caller can re-read it.
    try:
        json.load(filepath)
        filepath.seek(0)
        return True
    except json.JSONDecodeError:
        return False

@app.command()
def list(
    ctx: typer.Context,
    include_actions: bool = typer.Option(False, "--include-actions", "-i", help="When true, returns the list of actions for each workflow. Defaults to false"),
    search_query: str = typer.Option(None, "--search-query", "-s", help="When set, only returns workflows with the given substring in the name or description"),
    _print: CommandOptions._print = True,
    page: ListCommandOptions.page = None,
    page_size: ListCommandOptions.page_size = 250,
    table_output: ListCommandOptions.table_output = False,
    csv_output: ListCommandOptions.csv_output = False,
    columns: ListCommandOptions.columns = [],
    no_headers: ListCommandOptions.no_headers = False,
    filters: ListCommandOptions.filters = [],
    sort: ListCommandOptions.sort = [],
):
    """
    List workflows based on provided criteria. API key must have the View workflows permission
    """
    # FIX: docstring previously said "Get users" — copy-paste from scim.py.

    client = ctx.obj["client"]

    params = {
        "includeActions": include_actions,
        "searchQuery": search_query,
        "page": page,
        "pageSize": page_size
    }

    # Default column set for --table/--csv when the user did not choose one.
    if (table_output or csv_output) and not ctx.params.get('columns'):
        ctx.params['columns'] = [
            "Name=name",
            "Tag=tag",
            "Description=description",
        ]

    # remove any params that are None
    params = {k: v for k, v in params.items() if v is not None}

    if page is None:
        # if page is not specified, we want to fetch all pages
        r = client.fetch("api/v1/workflows", params=params)
    else:
        # if page is specified, we want to fetch only that page
        r = client.get("api/v1/workflows", params=params)

    if _print:
        print_output_with_context(ctx, r)
    else:
        return r

@app.command()
def get(
    ctx: typer.Context,
    tag: str = typer.Option(..., "--tag", "-t", help="The tag or unique, auto-generated identifier for the workflow"),
    yaml: bool = typer.Option(False, "--yaml", "-y", help="When true, returns the YAML representation of the descriptor."),
    _print: CommandOptions._print = True,
):
    """
    Retrieve workflow by tag or ID. API key must have the View workflows permission.
    """

    client = ctx.obj["client"]

    headers = {'Accept': 'application/yaml' if yaml else 'application/json'}
    r = client.get("api/v1/workflows/" + tag, headers=headers)

    # FIX: when _print is False the JSON branch used to print instead of
    # returning, so programmatic callers never received the response.
    if not _print:
        return r

    if yaml:
        # YAML descriptor is plain text; print it raw.
        print(r)
    else:
        print_output_with_context(ctx, r)

@app.command()
def delete(
    ctx: typer.Context,
    tag: str = typer.Option(..., "--tag", "-t", help="The tag or unique, auto-generated identifier for the workflow"),
):
    """
    Delete workflow by tag or ID. API key must have the Edit workflows permission.
    """

    client = ctx.obj["client"]

    client.delete("api/v1/workflows/" + tag)

@app.command()
def create(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help=" File containing workflow definition; can be passed as stdin with -, example: -f-")],
):
    """
    Create or update new workflow. API key must have the Edit workflows permission. Note: If a workflow with the same tag already exists, it will be updated.
    """

    client = ctx.obj["client"]

    # Accept either JSON (parsed to a dict) or YAML (posted as raw text).
    if _is_valid_json(file_input):
        content_type = "application/json"
        data = json.load(file_input)
    elif _is_valid_yaml(file_input):
        data = file_input.read()
        content_type = "application/yaml"
    else:
        raise typer.BadParameter("Input file is neither valid JSON nor YAML.")

    client.post("api/v1/workflows", data=data, content_type=content_type)
content_type == 'application/json' and isinstance(data, dict): + req_data = json.dumps(data) + + response = requests.request(method, url, params=params, headers=req_headers, data=req_data) + + self.logger.debug(f"Request Headers: {response.request.headers}") + self.logger.debug(f"Response Status Code: {response.status_code}") + self.logger.debug(f"Response Headers: {response.headers}") + self.logger.debug(f"Response Content: {response.text}") + + if not response.ok: + try: + # try to parse the error message + error = response.json() + status = response.status_code + message = error.get('message', 'Unknown error') + details = error.get('details', 'No details') + request_id = error.get('requestId', 'No request ID') + error_str = f'[red][bold]HTTP Error {status}[/bold][/red]: {message} - {details} [dim](Request ID: {request_id})[/dim]' + print(error_str) + raise typer.Exit(code=1) + except json.JSONDecodeError: + # if we can't parse the error message, just raise the HTTP error + response.raise_for_status() + + if raw_response: + return response + + try: + return response.json() + except json.JSONDecodeError: + if isinstance(response.text, str): + return response.text + elif isinstance(response.content, bytes): + return response.content + else: + return None + + def get(self, endpoint, params={}, headers={}, raw_response=False, content_type='application/yaml'): + return self.request('GET', endpoint, params=params, headers=headers, raw_response=raw_response, content_type=content_type) + + def post(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_response=False, content_type='application/json'): + return self.request('POST', endpoint, data=data, params=params, headers=headers, raw_body=raw_body, raw_response=raw_response, content_type=content_type) + + def put(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_response=False, content_type='application/json'): + return self.request('PUT', endpoint, data=data, params=params, 
headers=headers, raw_body=raw_body, raw_response=raw_response, content_type=content_type) + + def patch(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_response=False, content_type='application/json'): + return self.request('PATCH', endpoint, data=data, params=params, headers=headers, raw_body=raw_body, raw_response=raw_response, content_type=content_type) + + def delete(self, endpoint, data={}, params={}, headers={}, raw_response=False): + return self.request('DELETE', endpoint, data=data, params=params, headers=headers, raw_response=raw_response) + + def fetch(self, endpoint, params={}, headers={}): + # do paginated fetch, page number is indexed at 0 + # param page is page number, param pageSize is page size, default 250 + page = 0 + page_size = 250 + data_key = None + data = [] + while True: + response = self.get(endpoint, params={**params, 'page': page, 'pageSize': page_size}, headers=headers) + if not (isinstance(response, dict) or isinstance(response, list)): + # something is terribly wrong; this is definitely not a paginated response + break + + if data_key is None: + # first page, guess the data key + data_key = guess_data_key(response) + + # Some endpoints just return an array as the root element. 
In those cases, data_key is '' + if data_key == '': + # if the data key is empty, the response is a list; an empty list means no more data + if len(response) == 0: + break + data.extend(response) + else: + if data_key not in response or not response[data_key]: + break + data.extend(response[data_key]) + if response['totalPages'] == page + 1: + break + page += 1 + + if data_key == '': + return data + + return { + "total": len(data), + "page": 0, + "totalPages": 1 if data else 0, + data_key: data, + } + + def fetch_or_get(self, endpoint, page, prt, params={}): + if page is None: + # if page is not specified, we want to fetch all pages + r = self.fetch(endpoint, params=params) + else: + # if page is specified, we want to fetch only that page + r = self.get(endpoint, params=params) + + if prt: + print_json(data=r) + else: + return(r) + + + def get_entity(self, entity_tag: str, entity_type: str = ''): + match entity_type.lower(): + case 'team' | 'teams': + path_for_type = 'teams' + case _: + path_for_type = 'catalog' + + return self.get(f'api/v1/{path_for_type}/{entity_tag}') + + def delete_entity(self, entity_tag: str, entity_type: str = ''): + match entity_type.lower(): + case 'team' | 'teams': + path_for_type = 'teams' + case _: + path_for_type = 'catalog' + + return self.delete(f'api/v1/{path_for_type}/{entity_tag}') + + def archive_entity(self, entity_tag: str, entity_type: str = ''): + match entity_type.lower(): + case 'team' | 'teams': + path_for_type = 'teams' + case _: + path_for_type = 'catalog' + + return self.put(f'api/v1/{path_for_type}/{entity_tag}/archive') + + def unarchive_entity(self, entity_tag: str, entity_type: str = ''): + match entity_type.lower(): + case 'team' | 'teams': + path_for_type = 'teams' + case _: + path_for_type = 'catalog' + + return self.put(f'api/v1/{path_for_type}/{entity_tag}/unarchive') + + def read_file(self, file): + return file.read() diff --git a/cortexapps_cli/models/team.py b/cortexapps_cli/models/team.py new file mode 
import json
from enum import Enum
from typing import List, Optional, Union

class TeamType(Enum):
    """Discriminator for how a team's membership is managed."""
    CORTEX = "CORTEX"
    IDP = "IDP"

class Team:
    """In-memory model of a Cortex team, convertible to/from the API's JSON shape."""

    class Metadata:
        """Display metadata for a team (name plus optional description/summary)."""

        def __init__(self, name: str, description: Optional[str] = None, summary: Optional[str] = None):
            self.name = name
            self.description = description
            self.summary = summary

        @classmethod
        def from_obj(cls, obj):
            return cls(
                name=obj['name'],
                description=obj.get('description'),
                summary=obj.get('summary')
            )

        def to_obj(self):
            # Keys are always present; missing values serialize as null.
            return {
                'name': self.name,
                'description': self.description,
                'summary': self.summary
            }

    class Link:
        """A named external link attached to the team."""

        def __init__(self, description: str, name: str, type: str, url: str):
            self.description = description
            self.name = name
            self.type = type
            self.url = url

        @classmethod
        def from_obj(cls, obj):
            return cls(**obj)

        def to_obj(self):
            return vars(self)

    class SlackChannel:
        """A Slack channel associated with the team."""

        def __init__(self, description: str, name: str, notificationsEnabled: bool):
            self.description = description
            self.name = name
            self.notificationsEnabled = notificationsEnabled

        @classmethod
        def from_obj(cls, obj):
            return cls(**obj)

        def to_obj(self):
            return vars(self)

    class CortexMember:
        """A member of a Cortex-managed team."""

        def __init__(self, email: str, name: str, description: Optional[str] = None, role: Optional[str] = None, notificationsEnabled: bool = True):
            self.email = email
            self.name = name
            self.description = description
            self.role = role
            self.notificationsEnabled = notificationsEnabled

        @classmethod
        def from_obj(cls, obj):
            return cls(**obj)

        def to_obj(self):
            return vars(self)

    class CortexTeam:
        """Membership container for CORTEX-typed teams."""

        def __init__(self, members: List['Team.CortexMember']):
            self.members = members

        @classmethod
        def from_obj(cls, obj):
            return cls(members=[Team.CortexMember.from_obj(entry) for entry in obj['members']])

        def to_obj(self):
            return {'members': [entry.to_obj() for entry in self.members]}

    class IdpGroup:
        """Identity-provider group backing an IDP-typed team."""

        def __init__(self, group: str, provider: str):
            self.group = group
            self.provider = provider

        @classmethod
        def from_obj(cls, obj):
            return cls(**obj)

        def to_obj(self):
            return vars(self)


    def __init__(self,
            teamTag: str,
            metadata_name: str,
            type: TeamType,
            id: Optional[str] = None,
            links: Optional[List[Link]] = None,
            slackChannels: Optional[List[SlackChannel]] = None,
            cortexTeam: Optional['Team.CortexTeam'] = None,
            idpGroup: Optional['Team.IdpGroup'] = None,
            catalogEntityTag: Optional[str] = None,
            metadata_description: Optional[str] = None,
            metadata_summary: Optional[str] = None,
            isArchived: bool = False
        ):

        # Each team type requires its matching membership payload.
        if type == TeamType.CORTEX and cortexTeam is None:
            raise ValueError("cortexTeam.members must exist if type is 'CORTEX'")
        if type == TeamType.IDP and idpGroup is None:
            raise ValueError("idpGroup must exist if type is 'IDP'")

        self.id = id
        self.teamTag = teamTag
        # The catalog tag defaults to the team tag when not given.
        self.catalogEntityTag = catalogEntityTag or teamTag
        self.metadata = self.Metadata(metadata_name, metadata_description, metadata_summary)
        self.links = links or []
        self.slackChannels = slackChannels or []
        self.isArchived = isArchived
        self.cortexTeam = cortexTeam
        self.idpGroup = idpGroup
        self.type = type

    @classmethod
    def from_json(cls, data: Union[str, dict]):
        """Build a Team from a JSON string or an already-parsed dict."""
        parsed = json.loads(data) if isinstance(data, str) else data
        return cls.from_obj(parsed)

    def to_json(self):
        """Serialize this team to a pretty-printed JSON string."""
        return json.dumps(self.to_obj(), indent=4)

    @classmethod
    def from_obj(cls, obj: dict):
        """Build a Team from the API's dict representation."""
        team_type = TeamType(obj['type'])

        cortex_team = None
        idp_group = None
        if team_type == TeamType.CORTEX:
            cortex_team = cls.CortexTeam.from_obj(obj['cortexTeam'])
        elif team_type == TeamType.IDP:
            idp_group = cls.IdpGroup.from_obj(obj['idpGroup'])

        meta = obj['metadata']
        return cls(
            teamTag=obj['teamTag'],
            metadata_name=meta['name'],
            type=team_type,
            links=[cls.Link.from_obj(entry) for entry in obj.get('links', [])],
            slackChannels=[cls.SlackChannel.from_obj(entry) for entry in obj.get('slackChannels', [])],
            cortexTeam=cortex_team,
            idpGroup=idp_group,
            id=obj.get('id'),
            catalogEntityTag=obj.get('catalogEntityTag'),
            metadata_description=meta.get('description'),
            metadata_summary=meta.get('summary'),
            isArchived=obj.get('isArchived', False),
        )

    def to_obj(self):
        """Serialize this team to the API's dict representation."""
        payload = {
            "id": self.id,
            "teamTag": self.teamTag,
            "catalogEntityTag": self.catalogEntityTag,
            "metadata": self.metadata.to_obj(),
            "links": [entry.to_obj() for entry in self.links],
            "slackChannels": [entry.to_obj() for entry in self.slackChannels],
            "isArchived": self.isArchived,
            "type": self.type.value
        }

        # Only the membership payload matching the team type is emitted.
        if self.type == TeamType.CORTEX and self.cortexTeam:
            payload["cortexTeam"] = self.cortexTeam.to_obj()
        if self.type == TeamType.IDP and self.idpGroup:
            payload["idpGroup"] = self.idpGroup.to_obj()

        return payload

def main():
    # Creating manually
    team_manual = Team(
        teamTag="retail2-partner-experience",
        metadata_name="Retail2 Partner Experience",
        type=TeamType.CORTEX,
        links=[],
        slackChannels=[],
        cortexTeam=Team.CortexTeam(members=[])
    )
    print(json.dumps(team_manual.to_obj(), indent=4))

if (__name__ == "__main__"):
    main()
+ """ + if isinstance(response, list): + # if the response is a list, there is no data key + return '' + if isinstance(response, dict): + # if the response is a dict, it should have exactly one key whose value is a list + data_keys = [k for k, v in response.items() if isinstance(v, list)] + if len(data_keys) == 0: + # if no such key is found, raise an error + raise ValueError(f"Response dict does not contain a list: {response}") + if len(data_keys) > 1: + # if more than one such key is found, raise an error + raise ValueError(f"Response dict contains multiple lists: {response}") + return data_keys[0] + + # if the response is neither a list nor a dict, raise an error + raise ValueError(f"Response is not a list or dict: {response}") + +def get_value_at_path(data, path): + """ + Get the value at a specified path in a nested dictionary. + + Args: + data (dict): The input dictionary. + path (str): The path to the desired value, separated by dots. + + Returns: + The value at the specified path or None if the path doesn't exist. + """ + keys = path.split(".") + current = data + + try: + for key in keys: + if isinstance(current, dict): + current = current.get(key) + elif isinstance(current, list): + key = int(key) + current = current[key] + else: + return None + return current + except: + return None + +def matches_filters(data, filters): + """ + Check if a dictionary matches a list of filters. + + Args: + data (dict): The dictionary to check. + filters (list): A list of filters in the format jsonpath=regex. + + Returns: + True if the dictionary matches all filters, False otherwise. + """ + if not filters: + return True + for filter in filters: + jsonpath, regex = filter.split("=") + value = get_value_at_path(data, jsonpath) + if value is None: + return False + if not re.match(regex, str(value)): + return False + return True + +def humanize_value(value): + """ + Convert a value to a human-readable string. + + Args: + value: The value to convert. 
+ + Returns: + A human-readable string representation of the value. + """ + if value is None: + return "" + if isinstance(value, list): + return ', '.join([str(x) for x in value]) + if isinstance(value, dict): + return json.dumps(value, indent=2) + return str(value) + +def print_output(data, columns=None, filters=None, sort=None, output_format='json', no_headers=False): + """ + Print output in the specified format. + + Args: + data: The data to print. + columns: A list of columns to include in the output. + filters: A list of filters to apply to the data. + output_format: The format to print the data in. + no_headers: if column headers should not be shown + """ + + if output_format is None: + output_format = 'json' + elif not output_format in ['json', 'table', 'csv']: + raise ValueError("Invalid output format. Must be one of: json, table, csv") + + if output_format == 'json': + if columns: + raise typer.BadParameter("Columns can only be specified when using --table or --csv") + if filters: + raise typer.BadParameter("Filters can only be specified when using --table or --csv") + print_json(data=data) + return + + data_key = guess_data_key(data) + table_data = data.get(data_key) if data_key else data + + if not isinstance(table_data, list): + raise ValueError(f"Data is not a list: {table_data}") + + if not columns: + raise typer.BadParameter("Columns must be specified when using --table or --csv") + + columns = list(columns) + for idx, column in enumerate(columns): + if not re.match(r"^[a-zA-Z0-9_. 
]+=[a-zA-Z0-9_.]+$", column): + if re.match(r"^[a-zA-Z0-9_.]+$", column): + # if no column name is specified and it's a valid jsonpath, use the jsonpath as the column name + columns[idx] = f"{column}={column}" + else: + raise typer.BadParameter("Columns must be in the format HeaderName=jsonpath") + + if filters: + for filter in filters: + if not re.match(r"^[a-zA-Z0-9_.]+=.+$", filter): + raise typer.BadParameter("Filters must be in the format jsonpath=regex") + + column_headers = [x.split('=')[0] for x in columns] + column_accessors = [x.split('=')[1] for x in columns] + rows = [] + + if sort: + for sort_item in sort: + if not re.match(r"^[a-zA-Z0-9_.]+:(asc|ASC|desc|DESC)$", sort_item): + raise typer.BadParameter("Sort must be in the format jsonpath:asc or jsonpath:desc") + (jsonpath, order) = sort_item.split(':') + if order.lower() == 'asc': + table_data = sorted(table_data, key=lambda x: get_value_at_path(x, jsonpath)) + elif order.lower() == 'desc': + table_data = sorted(table_data, key=lambda x: get_value_at_path(x, jsonpath), reverse=True) + + for item in table_data: + if matches_filters(item, filters): + rows.append([humanize_value(get_value_at_path(item, accessor)) for accessor in column_accessors]) + + if output_format == 'table': + table = Table() + for header in column_headers: + table.add_column(header) + for row in rows: + table.add_row(*row) + console = Console() + console.print(table) + elif output_format == 'csv': + csv_writer = csv.writer(sys.stdout, lineterminator='\n') + if not no_headers: + csv_writer.writerow(column_headers) + csv_writer.writerows(rows) + +def print_output_with_context(ctx: typer.Context, data): + columns = ctx.params.get('columns', None) + filters = ctx.params.get('filters', None) + sort = ctx.params.get('sort', None) + table_output = ctx.params.get('table_output', None) + csv_output = ctx.params.get('csv_output', None) + no_headers = ctx.params.get('no_headers', None) + if table_output and csv_output: + raise 
typer.BadParameter("Only one of --table and --csv can be specified") + if table_output: + output_format = 'table' + elif csv_output: + output_format = 'csv' + else: + output_format = 'json' + print_output(data, columns=columns, filters=filters, sort=sort, output_format=output_format, no_headers=no_headers) diff --git a/data/catalog/ach-payments-nacha.yaml b/data/catalog/ach-payments-nacha.yaml deleted file mode 100644 index 9c0eb70..0000000 --- a/data/catalog/ach-payments-nacha.yaml +++ /dev/null @@ -1,47 +0,0 @@ -info: - description: null - title: ACH payments NACHA - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (ach-payments-nacha) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 02/Mar/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for ach-payments-nacha - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: ach-payments-nacha - x-cortex-groups: - - public-api-test - - public-api-test-group-1 - x-cortex-type: service - x-cortex-slack: - channels: - - name: ach-payments - x-cortex-owners: - - name: payments-team - type: GROUP - provider: CORTEX -openapi: 3.0.0 diff --git a/data/catalog/admin-customer-support.yaml 
b/data/catalog/admin-customer-support.yaml deleted file mode 100644 index 22b0113..0000000 --- a/data/catalog/admin-customer-support.yaml +++ /dev/null @@ -1,40 +0,0 @@ -info: - description: null - title: Admin customer support - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (admin-customer-support) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 27/Oct/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for admin-customer-support - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: admin-customer-support - x-cortex-groups: - - public-api-test - - public-api-test-group-2 - x-cortex-type: service -openapi: 3.0.0 diff --git a/data/catalog/alerting-stock-service.yaml b/data/catalog/alerting-stock-service.yaml deleted file mode 100644 index 0648773..0000000 --- a/data/catalog/alerting-stock-service.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Alerting stock service - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (alerting-stock-service) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at 
Arch Review 19/Jan/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for alerting-stock-service - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: alerting-stock-service - x-cortex-groups: - - public-api-test - x-cortex-type: service -openapi: 3.0.0 diff --git a/data/catalog/api-australia.yaml b/data/catalog/api-australia.yaml deleted file mode 100644 index c6c7703..0000000 --- a/data/catalog/api-australia.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Australia - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Australia) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-australia - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-back-profiles.yaml b/data/catalog/api-back-profiles.yaml deleted file mode 100644 index 4ea8612..0000000 --- a/data/catalog/api-back-profiles.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Back profiles - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Back profiles) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-back-profiles - 
x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-bi-systems.yaml b/data/catalog/api-bi-systems.yaml deleted file mode 100644 index 7debf1b..0000000 --- a/data/catalog/api-bi-systems.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: BI systems - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-BI systems) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-bi-systems - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-analytics.yaml b/data/catalog/api-business-analytics.yaml deleted file mode 100644 index a3d53f5..0000000 --- a/data/catalog/api-business-analytics.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Analytics - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Analytics) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-analytics - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-development.yaml b/data/catalog/api-business-development.yaml deleted file mode 100644 index e3f1aa7..0000000 --- a/data/catalog/api-business-development.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Development - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Development) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-development - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-innovation.yaml 
b/data/catalog/api-business-innovation.yaml deleted file mode 100644 index e9b9f58..0000000 --- a/data/catalog/api-business-innovation.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Innovation - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Innovation) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-innovation - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-model.yaml b/data/catalog/api-business-model.yaml deleted file mode 100644 index 96507de..0000000 --- a/data/catalog/api-business-model.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Model - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Model) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-model - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-operations.yaml b/data/catalog/api-business-operations.yaml deleted file mode 100644 index 1287519..0000000 --- a/data/catalog/api-business-operations.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Operations - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Operations) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-operations - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-plan.yaml b/data/catalog/api-business-plan.yaml deleted file mode 100644 index b13df15..0000000 --- a/data/catalog/api-business-plan.yaml +++ /dev/null @@ -1,17 +0,0 
@@ -info: - description: "" - title: Business Plan - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Plan) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-plan - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-process.yaml b/data/catalog/api-business-process.yaml deleted file mode 100644 index 7f74f11..0000000 --- a/data/catalog/api-business-process.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Process - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Process) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-process - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-systems.yaml b/data/catalog/api-business-systems.yaml deleted file mode 100644 index f0611ab..0000000 --- a/data/catalog/api-business-systems.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Systems - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Systems) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-systems - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-technology.yaml b/data/catalog/api-business-technology.yaml deleted file mode 100644 index 2890492..0000000 --- a/data/catalog/api-business-technology.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Technology - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Technology) - type: logs - - name: Grafana 
Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-technology - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-transformation.yaml b/data/catalog/api-business-transformation.yaml deleted file mode 100644 index 0173371..0000000 --- a/data/catalog/api-business-transformation.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Transformation - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Transformation) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-transformation - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-caribbean.yaml b/data/catalog/api-caribbean.yaml deleted file mode 100644 index 9b51121..0000000 --- a/data/catalog/api-caribbean.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Caribbean - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Caribbean) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-caribbean - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-central-america.yaml b/data/catalog/api-central-america.yaml deleted file mode 100644 index 972adcc..0000000 --- a/data/catalog/api-central-america.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Central America - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Central America) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-central-america - x-cortex-type: api - x-cortex-definition: {} - 
x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-circle-cicd.yaml b/data/catalog/api-circle-cicd.yaml deleted file mode 100644 index d0443c7..0000000 --- a/data/catalog/api-circle-cicd.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Circle CICD - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Circle CICD) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-circle-cicd - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-code-search.yaml b/data/catalog/api-code-search.yaml deleted file mode 100644 index 67c376e..0000000 --- a/data/catalog/api-code-search.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Code search - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Code search) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-codeR-earch - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-cold-storage.yaml b/data/catalog/api-cold-storage.yaml deleted file mode 100644 index 3d8f28f..0000000 --- a/data/catalog/api-cold-storage.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Cold storage - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Cold storage) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-cold-storage - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-credit-card.yaml b/data/catalog/api-credit-card.yaml deleted file mode 100644 index bed76a7..0000000 --- a/data/catalog/api-credit-card.yaml +++ /dev/null @@ -1,17 +0,0 @@ 
-info: - description: "" - title: Credit card - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Credit card) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-credit-card - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-datacenter-monitors.yaml b/data/catalog/api-datacenter-monitors.yaml deleted file mode 100644 index f216a75..0000000 --- a/data/catalog/api-datacenter-monitors.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Datacenter monitors - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Datacenter monitors) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-datacenter-monitors - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-design-file-updates.yaml b/data/catalog/api-design-file-updates.yaml deleted file mode 100644 index 1b46a79..0000000 --- a/data/catalog/api-design-file-updates.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Design file updates - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Design file updates) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-design-file-updates - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-dev-pros.yaml b/data/catalog/api-dev-pros.yaml deleted file mode 100644 index 7000594..0000000 --- a/data/catalog/api-dev-pros.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Dev pros - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Dev pros) - type: logs - - name: Grafana Dashboard - prod - type: 
dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-dev-pros - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-europe.yaml b/data/catalog/api-europe.yaml deleted file mode 100644 index 3e28132..0000000 --- a/data/catalog/api-europe.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Europe - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Europe) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-Europe - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-feature-services.yaml b/data/catalog/api-feature-services.yaml deleted file mode 100644 index 9037c86..0000000 --- a/data/catalog/api-feature-services.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Feature services - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Feature services) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-feature-services - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-management.yaml b/data/catalog/api-infrastructure-management.yaml deleted file mode 100644 index 834da2c..0000000 --- a/data/catalog/api-infrastructure-management.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Management - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Management) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-management - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test 
-openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-monitoring.yaml b/data/catalog/api-infrastructure-monitoring.yaml deleted file mode 100644 index 52b6049..0000000 --- a/data/catalog/api-infrastructure-monitoring.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Monitoring - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Monitoring) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-monitoring - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-planning.yaml b/data/catalog/api-infrastructure-planning.yaml deleted file mode 100644 index dedde29..0000000 --- a/data/catalog/api-infrastructure-planning.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Planning - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Planning) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-planning - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-security.yaml b/data/catalog/api-infrastructure-security.yaml deleted file mode 100644 index 4525ea0..0000000 --- a/data/catalog/api-infrastructure-security.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Security - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Security) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-security - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git 
a/data/catalog/api-infrastructure-services.yaml b/data/catalog/api-infrastructure-services.yaml deleted file mode 100644 index 913968f..0000000 --- a/data/catalog/api-infrastructure-services.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Services - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Services) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-services - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-strategy.yaml b/data/catalog/api-infrastructure-strategy.yaml deleted file mode 100644 index baf8617..0000000 --- a/data/catalog/api-infrastructure-strategy.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Strategy - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Strategy) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-strategy - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-na-west.yaml b/data/catalog/api-na-west.yaml deleted file mode 100644 index be97d1d..0000000 --- a/data/catalog/api-na-west.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: NA west - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-NA west) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-na-west - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-network-telemetry.yaml b/data/catalog/api-network-telemetry.yaml deleted file mode 100644 index 88ffb3e..0000000 --- 
a/data/catalog/api-network-telemetry.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Network telemetry - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Network telemetry) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-network-telemetry - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-hardware.yaml b/data/catalog/api-networking-hardware.yaml deleted file mode 100644 index 4a204fa..0000000 --- a/data/catalog/api-networking-hardware.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Hardware - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Hardware) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-hardware - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-infrastructure.yaml b/data/catalog/api-networking-infrastructure.yaml deleted file mode 100644 index 29f290a..0000000 --- a/data/catalog/api-networking-infrastructure.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Infrastructure - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Infrastructure) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-infrastructure - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-protocol.yaml b/data/catalog/api-networking-protocol.yaml deleted file mode 100644 index 864a895..0000000 --- a/data/catalog/api-networking-protocol.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - 
description: "" - title: Networking Protocol - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Protocol) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-protocol - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-services.yaml b/data/catalog/api-networking-services.yaml deleted file mode 100644 index 1ffba74..0000000 --- a/data/catalog/api-networking-services.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Services - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Services) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-services - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-software.yaml b/data/catalog/api-networking-software.yaml deleted file mode 100644 index f631896..0000000 --- a/data/catalog/api-networking-software.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Software - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Software) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-software - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-solutions.yaml b/data/catalog/api-networking-solutions.yaml deleted file mode 100644 index d00d388..0000000 --- a/data/catalog/api-networking-solutions.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Solutions - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking 
Solutions) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-solutions - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-oceania.yaml b/data/catalog/api-oceania.yaml deleted file mode 100644 index ae90f47..0000000 --- a/data/catalog/api-oceania.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Oceania - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Oceania) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-Oceania - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-polar-regions.yaml b/data/catalog/api-polar-regions.yaml deleted file mode 100644 index 839c102..0000000 --- a/data/catalog/api-polar-regions.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Polar Regions - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Polar Regions) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-polar-regions - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-premium-logins.yaml b/data/catalog/api-premium-logins.yaml deleted file mode 100644 index 8b10f39..0000000 --- a/data/catalog/api-premium-logins.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Premium logins - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Premium logins) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-premium-logins - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test 
-openapi: 3.0.0 diff --git a/data/catalog/api-profile-integrations.yaml b/data/catalog/api-profile-integrations.yaml deleted file mode 100644 index b5d3cf8..0000000 --- a/data/catalog/api-profile-integrations.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: profile integrations - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-profile integrations) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-profile-integrations - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-quarterly-reports.yaml b/data/catalog/api-quarterly-reports.yaml deleted file mode 100644 index 49f427d..0000000 --- a/data/catalog/api-quarterly-reports.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Quarterly reports - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Quarterly reports) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-quarterly-reports - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-security-health-status.yaml b/data/catalog/api-security-health-status.yaml deleted file mode 100644 index 2224293..0000000 --- a/data/catalog/api-security-health-status.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Security health status - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Security health status) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-security-health-status - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-south-america-region.yaml 
b/data/catalog/api-south-america-region.yaml deleted file mode 100644 index 5529539..0000000 --- a/data/catalog/api-south-america-region.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: South America Region - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-South America Region) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-south-america-region - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-standards-and-compliance.yaml b/data/catalog/api-standards-and-compliance.yaml deleted file mode 100644 index a3dbfeb..0000000 --- a/data/catalog/api-standards-and-compliance.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Standards and compliance - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Standards and compliance) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-standards-and-compliance - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-transactions-api.yaml b/data/catalog/api-transactions-api.yaml deleted file mode 100644 index b5f140c..0000000 --- a/data/catalog/api-transactions-api.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Transaction API - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Transaction API) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-transaction-api - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-user-services.yaml b/data/catalog/api-user-services.yaml deleted file mode 100644 index 17b7c68..0000000 --- 
a/data/catalog/api-user-services.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: User services - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-User services) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-user-services - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/auth-team.yaml b/data/catalog/auth-team.yaml deleted file mode 100644 index 0cf281b..0000000 --- a/data/catalog/auth-team.yaml +++ /dev/null @@ -1,27 +0,0 @@ -info: - description: null - title: Authentication Team - version: 1.0.0 - x-cortex-links: - - name: Authentication Team Onboarding Guide - type: onboarding - - name: Authentication Team Best Practices - type: best_practices - x-cortex-tag: auth-team - x-cortex-groups: - - public-api-test - x-cortex-team: - members: - - email: nikhil.unni@cortex.io - name: Nikhil Unni - notificationsEnabled: true - - email: aditya.bansal@cortex.io - name: Aditya Bansal - notificationsEnabled: true - - email: eyal.foni@cortex.io - name: Eyal Foni - notificationsEnabled: true - - email: jaroslaw.gaworecki@cortex.io - name: Jarosław Gaworecki - notificationsEnabled: true -openapi: 3.0.0 diff --git a/data/catalog/authentication.yaml b/data/catalog/authentication.yaml deleted file mode 100644 index 3ea113f..0000000 --- a/data/catalog/authentication.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: null - title: Authentication - version: 1.0.0 - x-cortex-links: - - name: Authentication Overview - type: documentation - - name: Authentication Specs - type: specs - - name: Authentication Bug bashes - type: bug_bashes - x-cortex-tag: authentication - x-cortex-type: domain - x-cortex-groups: - - public-api-test - x-cortex-children: - - tag: oauth2-identity-service - - tag: sso-integration -openapi: 3.0.0 diff --git a/data/catalog/autocomplete-parser.yaml 
b/data/catalog/autocomplete-parser.yaml deleted file mode 100644 index c67c638..0000000 --- a/data/catalog/autocomplete-parser.yaml +++ /dev/null @@ -1,38 +0,0 @@ -info: - description: null - title: Autocomplete parser - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (autocomplete-parser) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 27/Nov/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for autocomplete-parser - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: autocomplete-parser - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/autocomplete.yaml b/data/catalog/autocomplete.yaml deleted file mode 100644 index f1938b9..0000000 --- a/data/catalog/autocomplete.yaml +++ /dev/null @@ -1,41 +0,0 @@ -info: - description: >- - **Autocomplete** is the service used by our [frontend](https://cortex.io) to autocomplete searches made by a user on our site. 
It utilizes the following frameworks: - * Elasticsearch - * redis - title: Autocomplete - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (autocomplete) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 18/Jun/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for autocomplete - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: autocomplete - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/checkout.yaml b/data/catalog/checkout.yaml deleted file mode 100644 index 7d270a0..0000000 --- a/data/catalog/checkout.yaml +++ /dev/null @@ -1,18 +0,0 @@ -info: - description: null - title: Checkout - version: 1.0.0 - x-cortex-links: - - name: Checkout Overview - type: documentation - - name: Checkout Specs - type: specs - - name: Checkout Bug bashes - type: bug_bashes - x-cortex-tag: checkout - x-cortex-type: domain - x-cortex-children: - - tag: payments - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/credit-card-transaction-processor.yaml b/data/catalog/credit-card-transaction-processor.yaml deleted file mode 100644 index 099fb13..0000000 --- a/data/catalog/credit-card-transaction-processor.yaml 
+++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Credit card transaction processor - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (credit-card-transaction-processor) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 21/Sep/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for credit-card-transaction-processor - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: credit-card-transaction-processor - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/data-calculation-batch-job.yaml b/data/catalog/data-calculation-batch-job.yaml deleted file mode 100644 index 08cfb3b..0000000 --- a/data/catalog/data-calculation-batch-job.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Data calculator batch job - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (data-calculator-batch-job) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 16/Jun/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - 
staging - type: dashboard - description: Developer staging environment for data-calculator-batch-job - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: data-calculator-batch-job - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/data-ingestion.yaml b/data/catalog/data-ingestion.yaml deleted file mode 100644 index cb4e6eb..0000000 --- a/data/catalog/data-ingestion.yaml +++ /dev/null @@ -1,23 +0,0 @@ -info: - description: null - title: Data Ingestion - version: 1.0.0 - x-cortex-links: - - name: Data Ingestion Overview - type: documentation - - name: Data Ingestion Specs - type: specs - - name: Data Ingestion Bug bashes - type: bug_bashes - x-cortex-tag: data-ingestion - x-cortex-type: domain - x-cortex-children: - - tag: data-calculator-batch-job - - tag: inventory-scraper - - tag: image-recognition-pipeline - - tag: retail-image-labeler - - tag: image-store-bucket - - tag: orders-events - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/data.yaml b/data/catalog/data.yaml deleted file mode 100644 index 6ed4b6c..0000000 --- a/data/catalog/data.yaml +++ /dev/null @@ -1,31 +0,0 @@ -info: - description: null - title: Data - version: 1.0.0 - x-cortex-links: - - name: Data Onboarding Guide - type: onboarding - - name: Data Best Practices - type: best_practices - x-cortex-tag: data - x-cortex-type: team - 
x-cortex-team: - groups: - - name: okta-data - provider: OKTA - members: - - email: nikhil.unni@cortex.io - name: Nikhil Unni - notificationsEnabled: true - - email: jennie.chen@cortex.io - name: Jennie Chen - notificationsEnabled: true - - email: eyal.foni@cortex.io - name: Eyal Foni - notificationsEnabled: true - - email: mikolaj.stepniewski@cortex.io - name: Mikołaj Stępniewski - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/engineering-dev-cluster.yaml b/data/catalog/engineering-dev-cluster.yaml deleted file mode 100644 index eb0c2d7..0000000 --- a/data/catalog/engineering-dev-cluster.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: engineering-dev-cluster - version: 1.0.0 - x-cortex-links: - - name: Error Logs (engineering-dev-cluster) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: engineering-dev-cluster - x-cortex-type: k8s-cluster - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/experience.yaml b/data/catalog/experience.yaml deleted file mode 100644 index 3b84222..0000000 --- a/data/catalog/experience.yaml +++ /dev/null @@ -1,26 +0,0 @@ -info: - description: null - title: Experience - version: 1.0.0 - x-cortex-links: - - name: Experience Overview - type: documentation - - name: Experience Specs - type: specs - - name: Experience Bug bashes - type: bug_bashes - x-cortex-tag: experience - x-cortex-type: domain - x-cortex-owners: - - name: search-experience - type: GROUP - provider: CORTEX - x-cortex-children: - - tag: autocomplete-parser - - tag: autocomplete - - tag: results-cacher - - tag: result-cacher-postgres - - tag: query-analyzer - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/feed-calculator-job.yaml b/data/catalog/feed-calculator-job.yaml deleted file mode 100644 index 9e2ccba..0000000 --- 
a/data/catalog/feed-calculator-job.yaml +++ /dev/null @@ -1,38 +0,0 @@ -info: - description: null - title: Feed calculator job - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (feed-calculator-job) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 17/Nov/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for feed-calculator-job - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: feed-calculator-job - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/fraud-analyzer.yaml b/data/catalog/fraud-analyzer.yaml deleted file mode 100644 index 919ad1a..0000000 --- a/data/catalog/fraud-analyzer.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Fraud analyzer - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (fraud-analyzer) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 06/Feb/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for fraud-analyzer - - name: 
Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: fraud-analyzer - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/identity.yaml b/data/catalog/identity.yaml deleted file mode 100644 index 91ba56a..0000000 --- a/data/catalog/identity.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: null - title: Identity - version: 1.0.0 - x-cortex-links: - - name: Identity Overview - type: documentation - - name: Identity Specs - type: specs - - name: Identity Bug bashes - type: bug_bashes - x-cortex-tag: identity - x-cortex-type: domain - x-cortex-children: - - tag: profiles - - tag: authentication - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/image-recognition-pipeline.yaml b/data/catalog/image-recognition-pipeline.yaml deleted file mode 100644 index 40b0b1a..0000000 --- a/data/catalog/image-recognition-pipeline.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Image recognition pipeline - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (image-recognition-pipeline) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 10/Apr/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: 
Developer staging environment for image-recognition-pipeline - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: image-recognition-pipeline - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/image-store-bucket.yaml b/data/catalog/image-store-bucket.yaml deleted file mode 100644 index 271b5e9..0000000 --- a/data/catalog/image-store-bucket.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: null - title: Image Bucket Store - version: 1.0.0 - x-cortex-links: - - name: Error Logs (image-store-bucket) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: image-store-bucket - x-cortex-type: s3 - x-cortex-definition: - containsPii: true - engineVersion: 5.7.mysql_aurora - region: us-east-1 - versioned: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/import-engine.yaml b/data/catalog/import-engine.yaml deleted file mode 100644 index 654664b..0000000 --- a/data/catalog/import-engine.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Import Engine - version: 1.0.0 - x-cortex-links: - - name: Error Logs (import-engine) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: import-engine - x-cortex-type: component - 
x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/inventory-scraper.yaml b/data/catalog/inventory-scraper.yaml deleted file mode 100644 index bdce69f..0000000 --- a/data/catalog/inventory-scraper.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Inventory scraper - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (inventory-scraper) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 23/Mar/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for inventory-scraper - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: inventory-scraper - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/inventory-team.yaml b/data/catalog/inventory-team.yaml deleted file mode 100644 index d606c33..0000000 --- a/data/catalog/inventory-team.yaml +++ /dev/null @@ -1,33 +0,0 @@ -info: - description: null - title: Inventory Team - version: 1.0.0 - x-cortex-links: - - name: Inventory Team Onboarding Guide - type: onboarding - - name: Inventory Team Best Practices - type: best_practices - x-cortex-tag: inventory-team - x-cortex-team: - groups: - - 
name: okta-inventory-team - provider: OKTA - members: - - email: greg.pett@cortex.io - name: Greg Pett - notificationsEnabled: true - - email: andrew.si@cortex.io - name: Andrew Si - notificationsEnabled: true - - email: tyler.ackerson@cortex.io - name: Tyler Ackerson - notificationsEnabled: true - - email: wojciech.garncarz@cortex.io - name: Wojciech Garncarz - notificationsEnabled: true - - email: lukasz.blaszczyk@cortex.io - name: Lukasz Blaszczyk - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/inventory.yaml b/data/catalog/inventory.yaml deleted file mode 100644 index 876ebc4..0000000 --- a/data/catalog/inventory.yaml +++ /dev/null @@ -1,20 +0,0 @@ -info: - description: null - title: Inventory - version: 1.0.0 - x-cortex-links: - - name: Inventory Overview - type: documentation - - name: Inventory Specs - type: specs - - name: Inventory Bug bashes - type: bug_bashes - x-cortex-tag: inventory - x-cortex-type: domain - x-cortex-children: - - tag: robot-item-sorter - - tag: stock-level-analyzer - - tag: alerting-stock-service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/logistics-team.yaml b/data/catalog/logistics-team.yaml deleted file mode 100644 index 5a1a080..0000000 --- a/data/catalog/logistics-team.yaml +++ /dev/null @@ -1,24 +0,0 @@ -info: - description: null - title: Logistics Team - version: 1.0.0 - x-cortex-links: - - name: Logistics Team Onboarding Guide - type: onboarding - - name: Logistics Team Best Practices - type: best_practices - x-cortex-tag: logistics-team - x-cortex-team: - groups: - - name: okta-logistics-team - provider: OKTA - members: - - email: cristina.buenahora@cortex.io - name: Cristina Buenahora - notificationsEnabled: true - - email: lisa.tran@cortex.io - name: Lisa Tran - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/model-innovation-team.yaml 
b/data/catalog/model-innovation-team.yaml deleted file mode 100644 index 9e1e2ab..0000000 --- a/data/catalog/model-innovation-team.yaml +++ /dev/null @@ -1,27 +0,0 @@ -info: - description: null - title: Model Innovation Team - version: 1.0.0 - x-cortex-links: - - name: Model Innovation Team Onboarding Guide - type: onboarding - - name: Model Innovation Team Best Practices - type: best_practices - x-cortex-tag: model-innovation-team - x-cortex-team: - groups: - - name: okta-model-innovation-team - provider: OKTA - members: - - email: nikhil.unni@cortex.io - name: Nikhil Unni - notificationsEnabled: true - - email: eli.berg@cortex.io - name: Eli Berg - notificationsEnabled: true - - email: eyal.foni@cortex.io - name: Eyal Foni - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/new-item-fanout-service.yaml b/data/catalog/new-item-fanout-service.yaml deleted file mode 100644 index 1a7aba5..0000000 --- a/data/catalog/new-item-fanout-service.yaml +++ /dev/null @@ -1,42 +0,0 @@ -info: - description: null - title: New item fanout service - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (new-item-fanout-service) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 20/May/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for new-item-fanout-service - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring 
Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: new-item-fanout-service - x-cortex-type: service - x-cortex-groups: - - public-api-test - x-cortex-git: - github: - repository: my-org/my-repo -openapi: 3.0.0 diff --git a/data/catalog/oauth2-identity-service.yaml b/data/catalog/oauth2-identity-service.yaml deleted file mode 100644 index 3d332c4..0000000 --- a/data/catalog/oauth2-identity-service.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: OAuth2 identity service - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (oauth2-identity-service) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 23/Feb/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for oauth2-identity-service - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: oauth2-identity-service - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/order-management.yaml b/data/catalog/order-management.yaml deleted file mode 100644 index 0ce4cb3..0000000 --- 
a/data/catalog/order-management.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: null - title: Order Management - version: 1.0.0 - x-cortex-links: - - name: Order Management Overview - type: documentation - - name: Order Management Specs - type: specs - - name: Order Management Bug bashes - type: bug_bashes - x-cortex-tag: order-management - x-cortex-type: domain - x-cortex-children: - - tag: warehousing - - tag: checkout - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/orders-events.yaml b/data/catalog/orders-events.yaml deleted file mode 100644 index 5436ffa..0000000 --- a/data/catalog/orders-events.yaml +++ /dev/null @@ -1,20 +0,0 @@ -info: - description: null - title: Orders Data Event Stream - version: 1.0.0 - x-cortex-links: - - name: Error Logs (orders-events) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: orders-events - x-cortex-type: kafka-topic - x-cortex-definition: - cluster: prod - serializationTool: grpc - topicName: eats-event - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/packaging-recommender.yaml b/data/catalog/packaging-recommender.yaml deleted file mode 100644 index 9426bf6..0000000 --- a/data/catalog/packaging-recommender.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Packaging recommender - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (packaging-recommender) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 12/Feb/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for packaging-recommender - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring 
Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: packaging-recommender - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/payments-team.yaml b/data/catalog/payments-team.yaml deleted file mode 100644 index c6ff7c4..0000000 --- a/data/catalog/payments-team.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - title: Payments - x-cortex-links: - - name: Payments Onboarding Guide - type: onboarding - - name: Payments Best Practices - type: best_practices - x-cortex-tag: payments-team - x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.1 diff --git a/data/catalog/payments.yaml b/data/catalog/payments.yaml deleted file mode 100644 index f637003..0000000 --- a/data/catalog/payments.yaml +++ /dev/null @@ -1,22 +0,0 @@ -info: - description: null - title: Payments - version: 1.0.0 - x-cortex-links: - - name: Payments Overview - type: documentation - - name: Payments Specs - type: specs - - name: Payments Bug bashes - type: bug_bashes - x-cortex-tag: payments - x-cortex-type: domain - x-cortex-children: - - tag: transaction-store - - tag: credit-card-transaction-processor - - tag: ach-payments-nacha - - tag: fraud-analyzer - - tag: transaction-squid-proxy - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/profile-management.yaml b/data/catalog/profile-management.yaml deleted file mode 100644 index 
22235bf..0000000 --- a/data/catalog/profile-management.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Profile management - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (profile-management) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 02/Nov/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for profile-management - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: profile-management - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/profile-pictures.yaml b/data/catalog/profile-pictures.yaml deleted file mode 100644 index b74ded7..0000000 --- a/data/catalog/profile-pictures.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: Public facing profile picture images in all sizes - title: Profile Pictures - version: 1.0.0 - x-cortex-links: - - name: Error Logs (profile-pictures) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: profile-pictures - x-cortex-type: s3 - x-cortex-definition: - containsPii: true - engineVersion: 5.7.mysql_aurora - region: us-east-1 - versioned: 
true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/profile-team.yaml b/data/catalog/profile-team.yaml deleted file mode 100644 index 49ad18c..0000000 --- a/data/catalog/profile-team.yaml +++ /dev/null @@ -1,33 +0,0 @@ -info: - description: null - title: Profile Team - version: 1.0.0 - x-cortex-links: - - name: Profile Team Onboarding Guide - type: onboarding - - name: Profile Team Best Practices - type: best_practices - x-cortex-tag: profile-team - x-cortex-team: - groups: - - name: okta-profile-team - provider: OKTA - members: - - email: cristina.buenahora@cortex.io - name: Cristina Buenahora - notificationsEnabled: true - - email: david.barnes@cortex.io - name: David Barnes - notificationsEnabled: true - - email: hanna.vigil@cortex.io - name: Hanna Vigil - notificationsEnabled: true - - email: stormy.adams@cortex.io - name: Stormy Adams - notificationsEnabled: true - - email: igor.rog@cortex.io - name: Igor Rog - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/profiles.yaml b/data/catalog/profiles.yaml deleted file mode 100644 index 8e4d00c..0000000 --- a/data/catalog/profiles.yaml +++ /dev/null @@ -1,22 +0,0 @@ -info: - description: null - title: Profiles - version: 1.0.0 - x-cortex-links: - - name: Profiles Overview - type: documentation - - name: Profiles Specs - type: specs - - name: Profiles Bug bashes - type: bug_bashes - x-cortex-tag: profiles - x-cortex-type: domain - x-cortex-children: - - tag: user-profile-metadata-service - - tag: profile-management - - tag: profile-pictures - - tag: user-profile-metadata-service-bucket - - tag: admin-customer-support - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/query-analyzer.yaml b/data/catalog/query-analyzer.yaml deleted file mode 100644 index 08ec766..0000000 --- a/data/catalog/query-analyzer.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Query analyzer - 
version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (query-analyzer) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 28/Aug/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for query-analyzer - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: query-analyzer - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/recommendation-engine-kafka.yaml b/data/catalog/recommendation-engine-kafka.yaml deleted file mode 100644 index 65b1a76..0000000 --- a/data/catalog/recommendation-engine-kafka.yaml +++ /dev/null @@ -1,20 +0,0 @@ -info: - description: null - title: Recommendation Engine Kafka - version: 1.0.0 - x-cortex-links: - - name: Error Logs (recommendation-engine-kafka) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: recommendation-engine-kafka - x-cortex-type: kafka-topic - x-cortex-definition: - cluster: staging - serializationTool: avro - topicName: eats-event - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/recommendation-engine.yaml b/data/catalog/recommendation-engine.yaml 
deleted file mode 100644 index 062ed8b..0000000 --- a/data/catalog/recommendation-engine.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Recommendation engine - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (recommendation-engine) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 08/Aug/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for recommendation-engine - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: recommendation-engine - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/recommendations.yaml b/data/catalog/recommendations.yaml deleted file mode 100644 index 92272b9..0000000 --- a/data/catalog/recommendations.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: null - title: Recommendations - version: 1.0.0 - x-cortex-links: - - name: Recommendations Overview - type: documentation - - name: Recommendations Specs - type: specs - - name: Recommendations Bug bashes - type: bug_bashes - x-cortex-tag: recommendations - x-cortex-type: domain - x-cortex-children: - - tag: recommendation-engine - - tag: recommendation-engine-kafka - - tag: 
feed-calculator-job - - tag: new-item-fanout-service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/result-cacher-postgres.yaml b/data/catalog/result-cacher-postgres.yaml deleted file mode 100644 index d0e12d2..0000000 --- a/data/catalog/result-cacher-postgres.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: null - title: Result Cacher Postgres - version: 1.0.0 - x-cortex-links: - - name: Error Logs (result-cacher-postgres) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: result-cacher-postgres - x-cortex-type: rds - x-cortex-definition: - containsPii: false - dbFamily: sqlite - region: us-east-2 - version: 10.1.7 - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/retail-image-labeler.yaml b/data/catalog/retail-image-labeler.yaml deleted file mode 100644 index 4cc17c9..0000000 --- a/data/catalog/retail-image-labeler.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Retail image labeler - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (retail-image-labeler) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 04/Mar/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for retail-image-labeler - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - 
description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: retail-image-labeler - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/returns-processor.yaml b/data/catalog/returns-processor.yaml deleted file mode 100644 index 4ee9a47..0000000 --- a/data/catalog/returns-processor.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Returns processor - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (returns-processor) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 01/Feb/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for returns-processor - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: returns-processor - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/robot-item-sorter.yaml b/data/catalog/robot-item-sorter.yaml deleted file mode 100644 index dc14d26..0000000 --- a/data/catalog/robot-item-sorter.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Robot item sorter - version: 
1.0.0 - x-cortex-links: - - name: Load Balancer Logs (robot-item-sorter) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 13/Jun/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for robot-item-sorter - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: robot-item-sorter - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/search-experience.yaml b/data/catalog/search-experience.yaml deleted file mode 100644 index 4bd16e0..0000000 --- a/data/catalog/search-experience.yaml +++ /dev/null @@ -1,29 +0,0 @@ -info: - title: Search Experience - x-cortex-links: - - name: Search Experience Onboarding Guide - type: onboarding - - name: Search Experience Best Practices - type: best_practices - x-cortex-tag: search-experience - x-cortex-type: team - x-cortex-team: - members: - - email: cristina.buenahora@cortex.io - name: Cristina Buenahora - notificationsEnabled: true - - email: david.barnes@cortex.io - name: David Barnes - notificationsEnabled: true - - email: hanna.vigil@cortex.io - name: Hanna Vigil - notificationsEnabled: true - - email: stormy.adams@cortex.io - name: Stormy Adams - notificationsEnabled: true - - 
email: igor.rog@cortex.io - name: Igor Rog - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/search.yaml b/data/catalog/search.yaml deleted file mode 100644 index 64c5583..0000000 --- a/data/catalog/search.yaml +++ /dev/null @@ -1,20 +0,0 @@ -info: - description: null - title: Search - version: 1.0.0 - x-cortex-links: - - name: Search Overview - type: documentation - - name: Search Specs - type: specs - - name: Search Bug bashes - type: bug_bashes - x-cortex-tag: search - x-cortex-type: domain - x-cortex-children: - - tag: data-ingestion - - tag: experience - - tag: recommendations - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/shipping-integrations.yaml b/data/catalog/shipping-integrations.yaml deleted file mode 100644 index 1832843..0000000 --- a/data/catalog/shipping-integrations.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Shipping integrations - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (shipping-integrations) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 26/May/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for shipping-integrations - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: 
settings - description: Infrastructure configuration and settings - x-cortex-tag: shipping-integrations - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/sso-integration.yaml b/data/catalog/sso-integration.yaml deleted file mode 100644 index 33ab2f6..0000000 --- a/data/catalog/sso-integration.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: SSO integration - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (sso-integration) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 19/Mar/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for sso-integration - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: sso-integration - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/stock-level-analyzer.yaml b/data/catalog/stock-level-analyzer.yaml deleted file mode 100644 index 98abefa..0000000 --- a/data/catalog/stock-level-analyzer.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Stock level analyzer - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (stock-level-analyzer) - type: logs - 
description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 07/Jul/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for stock-level-analyzer - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: stock-level-analyzer - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/team-a.yaml b/data/catalog/team-a.yaml deleted file mode 100644 index 41187e3..0000000 --- a/data/catalog/team-a.yaml +++ /dev/null @@ -1,15 +0,0 @@ -info: - title: Team A - x-cortex-tag: team-a - x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: false - x-cortex-children: - - tag: team-b - x-cortex-groups: - - public-api-test - - letter-teams -openapi: 3.0.0 diff --git a/data/catalog/team-b.yaml b/data/catalog/team-b.yaml deleted file mode 100644 index 5fd54b9..0000000 --- a/data/catalog/team-b.yaml +++ /dev/null @@ -1,15 +0,0 @@ -info: - title: Team B - x-cortex-tag: team-b - x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: false - x-cortex-children: - - tag: team-c - x-cortex-groups: - - public-api-test - 
- letter-teams -openapi: 3.0.0 diff --git a/data/catalog/team-c.yaml b/data/catalog/team-c.yaml deleted file mode 100644 index d87bd70..0000000 --- a/data/catalog/team-c.yaml +++ /dev/null @@ -1,15 +0,0 @@ -info: - title: Team C - x-cortex-tag: team-c - x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: false - x-cortex-children: - - tag: team-d - x-cortex-groups: - - public-api-test - - letter-teams -openapi: 3.0.0 diff --git a/data/catalog/team-d.yaml b/data/catalog/team-d.yaml deleted file mode 100644 index d1024dc..0000000 --- a/data/catalog/team-d.yaml +++ /dev/null @@ -1,13 +0,0 @@ -info: - title: Team D - x-cortex-tag: team-d - x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: false - x-cortex-groups: - - public-api-test - - letter-teams -openapi: 3.0.0 diff --git a/data/catalog/transaction-store.yaml b/data/catalog/transaction-store.yaml deleted file mode 100644 index 2b617b1..0000000 --- a/data/catalog/transaction-store.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Transaction store - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (transaction-store) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 01/Aug/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for transaction-store - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - 
description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: transaction-store - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/transactions-squid-proxy.yaml b/data/catalog/transactions-squid-proxy.yaml deleted file mode 100644 index 13d8fda..0000000 --- a/data/catalog/transactions-squid-proxy.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: Production squid proxy used to route transaction traffic - title: Transaction Squid Proxy - version: 1.0.0 - x-cortex-links: - - name: Error Logs (transaction-squid-proxy) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: transaction-squid-proxy - x-cortex-type: squid-proxy - x-cortex-definition: - ip: 206.61.17.15 - vpc: us-east-2 - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/user-profile-metadata-service-bucket.yaml b/data/catalog/user-profile-metadata-service-bucket.yaml deleted file mode 100644 index 5e68a9b..0000000 --- a/data/catalog/user-profile-metadata-service-bucket.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: null - title: User Profile Metadata Service Bucket - version: 1.0.0 - x-cortex-links: - - name: Error Logs (user-profile-metadata-service-bucket) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: user-profile-metadata-service-bucket - x-cortex-type: s3 - x-cortex-definition: - containsPii: false - engineVersion: 5.3.mysql_aurora - region: us-east-2 - versioned: false - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/user-profile-metadata-service.yaml 
b/data/catalog/user-profile-metadata-service.yaml deleted file mode 100644 index cac3714..0000000 --- a/data/catalog/user-profile-metadata-service.yaml +++ /dev/null @@ -1,41 +0,0 @@ -info: - description: null - title: User profile metadata service - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (user-profile-metadata-service) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 23/Aug/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for user-profile-metadata-service - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: user-profile-metadata-service - x-cortex-type: service - x-cortex-groups: - - public-api-test - x-cortex-custom-metadata: - hasSecurityScans: true -openapi: 3.0.0 diff --git a/data/catalog/usps-api-client.yaml b/data/catalog/usps-api-client.yaml deleted file mode 100644 index 0b1e85c..0000000 --- a/data/catalog/usps-api-client.yaml +++ /dev/null @@ -1,38 +0,0 @@ -info: - description: null - title: USPS API client - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (usps-api-client) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation 
- description: presented at Arch Review 02/Jun/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for usps-api-client - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: usps-api-client - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/warehousing.yaml b/data/catalog/warehousing.yaml deleted file mode 100644 index 074aca3..0000000 --- a/data/catalog/warehousing.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: null - title: Warehousing - version: 1.0.0 - x-cortex-links: - - name: Warehousing Overview - type: documentation - - name: Warehousing Specs - type: specs - - name: Warehousing Bug bashes - type: bug_bashes - x-cortex-tag: warehousing - x-cortex-type: domain - x-cortex-children: - - tag: inventory - - tag: fulfillment - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/run-time/archive-entity.yaml b/data/import/catalog/cli-test-archive-entity.yaml similarity index 60% rename from data/run-time/archive-entity.yaml rename to data/import/catalog/cli-test-archive-entity.yaml index 03c76b5..7bc8b79 100644 --- a/data/run-time/archive-entity.yaml +++ b/data/import/catalog/cli-test-archive-entity.yaml @@ -2,8 +2,7 @@ openapi: 3.0.0 info: title: Archive Entity description: Entity that will be created and then archived to test 
catalog archive entity - x-cortex-tag: archive-entity - x-cortex-type: component + x-cortex-tag: cli-test-archive-entity + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test diff --git a/data/run-time/create-entity.yaml b/data/import/catalog/cli-test-create-entity.yaml similarity index 56% rename from data/run-time/create-entity.yaml rename to data/import/catalog/cli-test-create-entity.yaml index a5d0a96..5468d0f 100644 --- a/data/run-time/create-entity.yaml +++ b/data/import/catalog/cli-test-create-entity.yaml @@ -2,8 +2,7 @@ openapi: 3.0.0 info: title: Create Entity description: Entity that will be created to test catalog create entity - x-cortex-tag: create-entity - x-cortex-type: component + x-cortex-tag: cli-test-create-entity + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test diff --git a/data/run-time/delete-entity.yaml b/data/import/catalog/cli-test-delete-entity.yaml similarity index 59% rename from data/run-time/delete-entity.yaml rename to data/import/catalog/cli-test-delete-entity.yaml index 0f63e13..a849a4a 100644 --- a/data/run-time/delete-entity.yaml +++ b/data/import/catalog/cli-test-delete-entity.yaml @@ -2,8 +2,7 @@ openapi: 3.0.0 info: title: Delete Entity description: Entity that will be created and then deleted to test catalog delete entity - x-cortex-tag: delete-entity - x-cortex-type: component + x-cortex-tag: cli-test-delete-entity + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test diff --git a/data/run-time/docs-entity.yaml b/data/import/catalog/cli-test-docs-entity.yaml similarity index 76% rename from data/run-time/docs-entity.yaml rename to data/import/catalog/cli-test-docs-entity.yaml index 3201f18..6c4b89d 100644 --- a/data/run-time/docs-entity.yaml +++ b/data/import/catalog/cli-test-docs-entity.yaml @@ -2,7 +2,7 @@ openapi: 3.0.0 info: title: Docs Entity description: Entity that will be 
created and then updated with OpenAPI docs - x-cortex-tag: docs-entity + x-cortex-tag: cli-test-docs-entity x-cortex-type: service x-cortex-groups: - - public-api-test + - cli-test diff --git a/data/import/catalog/cli-test-domain-child.yaml b/data/import/catalog/cli-test-domain-child.yaml new file mode 100644 index 0000000..4b5dd26 --- /dev/null +++ b/data/import/catalog/cli-test-domain-child.yaml @@ -0,0 +1,9 @@ +openapi: 3.0.0 +info: + title: Test Domain Child + x-cortex-tag: cli-test-domain-child + x-cortex-type: domain + x-cortex-groups: + - cli-test + x-cortex-children: + - tag: cli-test-service diff --git a/data/import/catalog/cli-test-domain-parent.yaml b/data/import/catalog/cli-test-domain-parent.yaml new file mode 100644 index 0000000..188306c --- /dev/null +++ b/data/import/catalog/cli-test-domain-parent.yaml @@ -0,0 +1,9 @@ +openapi: 3.0.0 +info: + title: Test Domain Parent + x-cortex-tag: cli-test-domain-parent + x-cortex-type: domain + x-cortex-groups: + - cli-test + x-cortex-children: + - tag: cli-test-domain-child diff --git a/data/run-time/groups-entity.yaml b/data/import/catalog/cli-test-groups-entity.yaml similarity index 75% rename from data/run-time/groups-entity.yaml rename to data/import/catalog/cli-test-groups-entity.yaml index 42e4743..5e33198 100644 --- a/data/run-time/groups-entity.yaml +++ b/data/import/catalog/cli-test-groups-entity.yaml @@ -2,7 +2,7 @@ openapi: 3.0.0 info: title: Groups Entity description: Entity that will be created and then used in Groups tests - x-cortex-tag: groups-entity + x-cortex-tag: cli-test-groups-entity x-cortex-type: service x-cortex-groups: - - public-api-test + - cli-test diff --git a/data/run-time/create-patch-entity.yaml b/data/import/catalog/cli-test-patch-entity.yaml similarity index 57% rename from data/run-time/create-patch-entity.yaml rename to data/import/catalog/cli-test-patch-entity.yaml index 914b459..1bedce1 100644 --- a/data/run-time/create-patch-entity.yaml +++ 
b/data/import/catalog/cli-test-patch-entity.yaml @@ -1,12 +1,11 @@ openapi: 3.0.0 info: - title: Patch Entity + title: CLI Test Patch Entity description: Entity that will be created to test catalog patch entity - x-cortex-tag: patch-entity - x-cortex-type: component + x-cortex-tag: cli-test-patch-entity + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test x-cortex-custom-metadata: owners: - owner-1 diff --git a/data/catalog/backend-worker.yaml b/data/import/catalog/cli-test-service-callee.yaml similarity index 99% rename from data/catalog/backend-worker.yaml rename to data/import/catalog/cli-test-service-callee.yaml index 120eacc..a23b6c5 100644 --- a/data/catalog/backend-worker.yaml +++ b/data/import/catalog/cli-test-service-callee.yaml @@ -1,27 +1,10 @@ -openapi: 3.0.0 +openapi: 3.0.1 info: - description: "" - title: Backend Worker - version: 1.0.0 - x-cortex-link: - - name: Error Logs (backend-worker) - type: logs - url: https://cortex.io - - name: Grafana Dashboard - prod - type: dashboard - url: https://cortex.io - - name: Prometheus Dashboard - prod - type: dashboard - url: https://cortex.io - x-cortex-tag: backend-worker - x-cortex-type: component - x-cortex-definition: {} + title: Test Service Callee + x-cortex-tag: cli-test-service-callee + x-cortex-type: service x-cortex-groups: - - public-api-test - - include-metadata-test - - include-links-test - x-cortex-custom-metadata: - cicd: circle-ci + - cli-test servers: - url: https://api.getcortexapp.com description: Cortex Cloud API host diff --git a/data/import/catalog/cli-test-service-caller.yaml b/data/import/catalog/cli-test-service-caller.yaml new file mode 100644 index 0000000..88dae5a --- /dev/null +++ b/data/import/catalog/cli-test-service-caller.yaml @@ -0,0 +1,7 @@ +openapi: 3.0.1 +info: + title: Test Service Caller + x-cortex-tag: cli-test-service-caller + x-cortex-type: service + x-cortex-groups: + - cli-test diff --git 
a/data/import/catalog/cli-test-service-group-1.yaml b/data/import/catalog/cli-test-service-group-1.yaml new file mode 100644 index 0000000..eee671f --- /dev/null +++ b/data/import/catalog/cli-test-service-group-1.yaml @@ -0,0 +1,8 @@ +openapi: 3.0.1 +info: + title: Test Service Group 1 + x-cortex-tag: cli-test-service-group-1 + x-cortex-type: service + x-cortex-groups: + - cli-test + - cli-test-group-1 diff --git a/data/import/catalog/cli-test-service-group-2.yaml b/data/import/catalog/cli-test-service-group-2.yaml new file mode 100644 index 0000000..ff8d853 --- /dev/null +++ b/data/import/catalog/cli-test-service-group-2.yaml @@ -0,0 +1,8 @@ +openapi: 3.0.1 +info: + title: Test Service Group 2 + x-cortex-tag: cli-test-service-group-2 + x-cortex-type: service + x-cortex-groups: + - cli-test + - cli-test-group-2 diff --git a/data/import/catalog/cli-test-service-links.yaml b/data/import/catalog/cli-test-service-links.yaml new file mode 100644 index 0000000..664b2e8 --- /dev/null +++ b/data/import/catalog/cli-test-service-links.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Service Links + x-cortex-tag: cli-test-service-links + x-cortex-type: service + x-cortex-link: + - url: https://cortex.io + name: Cortex + type: Documentation + x-cortex-groups: + - cli-test + - include-links-test diff --git a/data/import/catalog/cli-test-service-metadata.yaml b/data/import/catalog/cli-test-service-metadata.yaml new file mode 100644 index 0000000..a4d78a8 --- /dev/null +++ b/data/import/catalog/cli-test-service-metadata.yaml @@ -0,0 +1,10 @@ +openapi: 3.0.1 +info: + title: Test Service Metadata + x-cortex-tag: cli-test-service-metadata + x-cortex-type: service + x-cortex-groups: + - cli-test + - include-metadata-test + x-cortex-custom-metadata: + foo: bar diff --git a/data/import/catalog/cli-test-service-test-team-1.yaml b/data/import/catalog/cli-test-service-test-team-1.yaml new file mode 100644 index 0000000..a7df19a --- /dev/null +++ 
b/data/import/catalog/cli-test-service-test-team-1.yaml @@ -0,0 +1,11 @@ +openapi: 3.0.1 +info: + title: Test Service Team 1 + x-cortex-tag: cli-test-service-team-1 + x-cortex-type: service + x-cortex-owners: + - name: cli-test-team-1 + type: GROUP + provider: CORTEX + x-cortex-groups: + - cli-test diff --git a/data/import/catalog/cli-test-service-test-team-2.yaml b/data/import/catalog/cli-test-service-test-team-2.yaml new file mode 100644 index 0000000..b937107 --- /dev/null +++ b/data/import/catalog/cli-test-service-test-team-2.yaml @@ -0,0 +1,11 @@ +openapi: 3.0.1 +info: + title: Test Service Test Team 2 + x-cortex-tag: cli-test-service-test-team-2 + x-cortex-type: service + x-cortex-owners: + - name: cli-test-team-2 + type: GROUP + provider: CORTEX + x-cortex-groups: + - cli-test diff --git a/data/import/catalog/cli-test-service.yaml b/data/import/catalog/cli-test-service.yaml new file mode 100644 index 0000000..b726b62 --- /dev/null +++ b/data/import/catalog/cli-test-service.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Service + x-cortex-tag: cli-test-service + x-cortex-git: + github: + repository: my-org/my-repo + x-cortex-type: service + x-cortex-groups: + - cli-test + x-cortex-custom-metadata: + cicd: circle-ci diff --git a/data/import/catalog/cli-test-team-1.yaml b/data/import/catalog/cli-test-team-1.yaml new file mode 100644 index 0000000..5fcdb2a --- /dev/null +++ b/data/import/catalog/cli-test-team-1.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Team 1 + x-cortex-tag: cli-test-team-1 + x-cortex-type: team + x-cortex-team: + members: + - email: bubbles.harnis@trailer-park.io + name: Bubbles Harnis + notificationsEnabled: false + x-cortex-groups: + - cli-test diff --git a/data/import/catalog/cli-test-team-2.yaml b/data/import/catalog/cli-test-team-2.yaml new file mode 100644 index 0000000..15381fe --- /dev/null +++ b/data/import/catalog/cli-test-team-2.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Team 2 + 
x-cortex-tag: cli-test-team-2 + x-cortex-type: team + x-cortex-team: + members: + - email: jim.lahey@trailer-park.io + name: Jim Lahey + notificationsEnabled: false + x-cortex-groups: + - cli-test diff --git a/data/import/catalog/cli-test-team-child.yaml b/data/import/catalog/cli-test-team-child.yaml new file mode 100644 index 0000000..2d6bd00 --- /dev/null +++ b/data/import/catalog/cli-test-team-child.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Team Child + x-cortex-tag: cli-test-team-child + x-cortex-type: team + x-cortex-team: + members: + - email: bubbles.harnis@trailer-park.io + name: Bubbles Harnis + notificationsEnabled: false + x-cortex-groups: + - cli-test diff --git a/data/import/catalog/cli-test-team-parent.yaml b/data/import/catalog/cli-test-team-parent.yaml new file mode 100644 index 0000000..17382ee --- /dev/null +++ b/data/import/catalog/cli-test-team-parent.yaml @@ -0,0 +1,14 @@ +openapi: 3.0.1 +info: + title: Test Team Parent + x-cortex-tag: cli-test-team-parent + x-cortex-type: team + x-cortex-team: + members: + - email: richard.lafleur@trailer-park.io + name: Richard LaFleur + notificationsEnabled: false + x-cortex-children: + - tag: cli-test-team-child + x-cortex-groups: + - cli-test diff --git a/data/run-time/unarchive-entity.yaml b/data/import/catalog/cli-test-unarchive-entity.yaml similarity index 57% rename from data/run-time/unarchive-entity.yaml rename to data/import/catalog/cli-test-unarchive-entity.yaml index 135a4fd..bc25539 100644 --- a/data/run-time/unarchive-entity.yaml +++ b/data/import/catalog/cli-test-unarchive-entity.yaml @@ -2,8 +2,7 @@ openapi: 3.0.0 info: title: Unarchive Entity description: Entity that will be created, archived and then un-archived. 
- x-cortex-tag: unarchive-entity - x-cortex-type: component + x-cortex-tag: cli-test-unarchive-entity + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test diff --git a/data/import/entity-types/cli-test.json b/data/import/entity-types/cli-test.json new file mode 100644 index 0000000..132a29e --- /dev/null +++ b/data/import/entity-types/cli-test.json @@ -0,0 +1,6 @@ +{ + "description": "This is a test entity type definition.", + "name": "CLI Test With Empty Schema", + "schema": {}, + "type": "cli-test" +} diff --git a/data/import/ip-allowlist/ip-allowlist.json b/data/import/ip-allowlist/ip-allowlist.json new file mode 100644 index 0000000..3770478 --- /dev/null +++ b/data/import/ip-allowlist/ip-allowlist.json @@ -0,0 +1,8 @@ +{ + "entries": [ + { + "address": "0.0.0.0/0", + "description": "all IPv4 rangeP" + } + ] +} diff --git a/data/import/plugins/cli-test-plugin.json b/data/import/plugins/cli-test-plugin.json new file mode 100644 index 0000000..2078298 --- /dev/null +++ b/data/import/plugins/cli-test-plugin.json @@ -0,0 +1,19 @@ +{ + "blob": "

This is a simple plugin

", + "contexts": [ + { + "type": "GLOBAL" + }, + { + "entityFilter": { + "type": "SERVICE_FILTER" + }, + "type": "ENTITY" + } + ], + "description": "Simple Plugin", + "isDraft": false, + "minimumRoleRequired": "VIEWER", + "name": "CLI Test Plugin", + "tag": "cli-test-plugin" +} diff --git a/tests.orig/test_scorecards_draft.yaml b/data/import/scorecards/cli-test-draft-scorecard.yaml similarity index 87% rename from tests.orig/test_scorecards_draft.yaml rename to data/import/scorecards/cli-test-draft-scorecard.yaml index c16e9c1..72f4e86 100644 --- a/tests.orig/test_scorecards_draft.yaml +++ b/data/import/scorecards/cli-test-draft-scorecard.yaml @@ -1,5 +1,5 @@ -tag: test-scorecard-draft -name: Test Scorecard Draft +tag: cli-test-draft-scorecard +name: CLI Test Draft Scorecard description: Used to test Cortex CLI draft: true ladder: diff --git a/tests.orig/test_scorecards.yaml b/data/import/scorecards/cli-test-scorecard.yaml similarity index 53% rename from tests.orig/test_scorecards.yaml rename to data/import/scorecards/cli-test-scorecard.yaml index d9bd412..33beaed 100644 --- a/tests.orig/test_scorecards.yaml +++ b/data/import/scorecards/cli-test-scorecard.yaml @@ -1,5 +1,5 @@ -tag: test-scorecard -name: Test Scorecard +tag: cli-test-scorecard +name: CLI Test Scorecard description: Used to test Cortex CLI draft: false ladder: @@ -7,9 +7,15 @@ ladder: levels: - name: You Made It rank: 1 - description: "\"If you ain't first, you're last. 
-- Ricky Bobby\" -- Scott Mullin" + description: "My boring description" color: 7cf376 rules: +- title: Is Definitely False + expression: custom("testField") == "100" + weight: 1 + level: You Made It + filter: + category: SERVICE - title: Has Custom Data expression: custom("testField") != null weight: 1 @@ -17,5 +23,5 @@ rules: filter: category: SERVICE filter: - query: 'entity_descriptor.info.`x-cortex-tag` = "test-service"' + query: 'entity.tag() == "cli-test-service"' category: SERVICE diff --git a/data/import/workflows/cli-test-workflow.yaml b/data/import/workflows/cli-test-workflow.yaml new file mode 100644 index 0000000..7389049 --- /dev/null +++ b/data/import/workflows/cli-test-workflow.yaml @@ -0,0 +1,20 @@ +name: CLI Test Workflowe +tag: cli-test-workflow +description: My test workflow. +isDraft: true +filter: + type: GLOBAL +runResponseTemplate: null +actions: +- name: Send message + slug: send-message + schema: + channel: air-force + message: "Workflow submitted by: {{context.initiatedBy.name}}\nRoles: \n{{#context.initiatedBy.roles}}\n\ + role name: {{name}}\n{{/context.initiatedBy.roles}}" + type: SLACK + outgoingActions: [] + isRootAction: true +runRestrictionPolicies: [] +iconTag: null + diff --git a/data/resource-definitions/api.json b/data/resource-definitions/api.json deleted file mode 100644 index 369f0d7..0000000 --- a/data/resource-definitions/api.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "type": "api", - "source": "CUSTOM", - "name": "API", - "description": "A Cortex API method.", - "schema": { - "type": "object", - "required": [], - "properties": {} - } -} diff --git a/data/resource-definitions/component.json b/data/resource-definitions/component.json deleted file mode 100644 index bed7e7f..0000000 --- a/data/resource-definitions/component.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "type": "component", - "source": "CUSTOM", - "name": "Component", - "description": "Used for public-api-test data.", - "schema": { - "type": "object" - } -} diff --git 
a/data/resource-definitions/k8s-cluster.json b/data/resource-definitions/k8s-cluster.json deleted file mode 100644 index 3c0dccc..0000000 --- a/data/resource-definitions/k8s-cluster.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "type": "k8s-cluster", - "source": "CUSTOM", - "name": "K8s Cluster", - "description": null, - "schema": { - "type": "object" - } -} diff --git a/data/resource-definitions/kafka-topic.json b/data/resource-definitions/kafka-topic.json deleted file mode 100644 index 68252cf..0000000 --- a/data/resource-definitions/kafka-topic.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "type": "kafka-topic", - "source": "CUSTOM", - "name": "Kafka Topic", - "description": null, - "schema": { - "type": "object", - "required": [ - "topicName", - "cluster", - "serializationTool" - ], - "properties": { - "cluster": { - "type": "string" - }, - "topicName": { - "type": "string" - }, - "serializationTool": { - "type": "string" - } - } - } -} diff --git a/data/resource-definitions/squid-proxy.json b/data/resource-definitions/squid-proxy.json deleted file mode 100644 index 45fca39..0000000 --- a/data/resource-definitions/squid-proxy.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "type": "squid-proxy", - "source": "CUSTOM", - "name": "Squid Proxy", - "description": null, - "schema": { - "type": "object", - "required": [ - "ip", - "vpc" - ], - "properties": { - "ip": { - "type": "string" - }, - "vpc": { - "type": "string" - }, - "resources": { - "type": "string" - } - } - } -} diff --git a/data/run-time/3 b/data/run-time/3 deleted file mode 100644 index 36979f5..0000000 --- a/data/run-time/3 +++ /dev/null @@ -1,11 +0,0 @@ -- Stefanos - - scorecards - - used to be a lot faster, ran for hours - - permissions model - - viewer for users -> can request an exemption - - scorecards that target multiple entities, ie service or team - - -- Fred and Pradeep - - Lisa had a few questions - ad - - Migration global regional resources -> ran some script diff --git a/data/run-time/create-dryrun.yaml 
b/data/run-time/create-dryrun.yaml deleted file mode 100644 index 0cf1e9d..0000000 --- a/data/run-time/create-dryrun.yaml +++ /dev/null @@ -1,6 +0,0 @@ -openapi: 3.0.0 -info: - title: Create Entity DryRun - description: Entity that should never be created; only used to test catalog dryRun - x-cortex-tag: create-entity-dryrun - x-cortex-type: service diff --git a/data/run-time/create-entity-type-empty-schema.json b/data/run-time/create-entity-type-empty-schema.json deleted file mode 100644 index e6521c1..0000000 --- a/data/run-time/create-entity-type-empty-schema.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "description": "This is a test resource definition.", - "name": "Public API Type With Empty Schema", - "schema": {}, - "type": "public-api-type-empty-schema" -} diff --git a/data/run-time/custom-data-bulk.json b/data/run-time/custom-data-bulk.json index 15538fc..3f3056a 100644 --- a/data/run-time/custom-data-bulk.json +++ b/data/run-time/custom-data-bulk.json @@ -1,6 +1,6 @@ { "values": { - "backend-worker": [ + "cli-test-service-caller": [ { "key": "bulk-key-1", "value": "value-1" @@ -10,7 +10,7 @@ "value": "value-2" } ], - "ach-payments-nacha": [ + "cli-test-service-callee": [ { "key": "bulk-key-3", "value": "value-3" diff --git a/data/run-time/custom-events.json b/data/run-time/custom-events.json index dfd1948..3cc7847 100644 --- a/data/run-time/custom-events.json +++ b/data/run-time/custom-events.json @@ -4,7 +4,7 @@ "test2": "someVal2" }, "description": "Validate event", - "timestamp": "2023-10-10T13:27:51.226Z", + "timestamp": "2023-10-10T13:27:51", "title": "validate event", "type": "VALIDATE_SERVICE" } diff --git a/data/run-time/dependencies-bulk.json b/data/run-time/dependencies-bulk.json index f2c6fe4..fe0d250 100644 --- a/data/run-time/dependencies-bulk.json +++ b/data/run-time/dependencies-bulk.json @@ -1,6 +1,6 @@ { "values": { - "fraud-analyzer": [ + "cli-test-service-caller": [ { "description": "dependency description", "metadata": { @@ -9,7 +9,7 @@ }, 
"method": "GET", "path": "/api/v1/github/configurations", - "tag": "backend-worker" + "tag": "cli-test-service-callee" } ] } diff --git a/data/run-time/docs.yaml b/data/run-time/docs.yaml index 2a4a2c9..053daca 100644 --- a/data/run-time/docs.yaml +++ b/data/run-time/docs.yaml @@ -6,7 +6,7 @@ paths: /: get: operationId: listVersionsv2 - summary: List API versions + summary: List API versions with 'full' history responses: "200": description: 200 response diff --git a/data/run-time/entity-type-update.json b/data/run-time/entity-type-update.json new file mode 100644 index 0000000..c2f8ad1 --- /dev/null +++ b/data/run-time/entity-type-update.json @@ -0,0 +1,5 @@ +{ + "description": "This is a test entity type definition.", + "name": "CLI Test With Empty Schema -- Update", + "schema": {} +} diff --git a/data/run-time/patch-entity.yaml b/data/run-time/patch-entity.yaml index 5e7b4c4..95a5c5b 100644 --- a/data/run-time/patch-entity.yaml +++ b/data/run-time/patch-entity.yaml @@ -1,6 +1,6 @@ openapi: 3.0.0 info: - x-cortex-tag: patch-entity + x-cortex-tag: cli-test-patch-entity x-cortex-custom-metadata: owners: - owner-2 diff --git a/data/run-time/scorecard.yaml b/data/run-time/scorecard.yaml deleted file mode 100644 index 910e1ef..0000000 --- a/data/run-time/scorecard.yaml +++ /dev/null @@ -1,21 +0,0 @@ -tag: public-api-test-scorecard -name: Public API Test Scorecard -description: Used to test Cortex public API -draft: false -ladder: - name: Default Ladder - levels: - - name: Gold - rank: 1 - description: Gold tier - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("hasSecurityScans") != null - weight: 1 - level: Gold - filter: - category: SERVICE -filter: - query: 'entity.tag() == "user-profile-metadata-service"' - category: SERVICE diff --git a/data/run-time/scorecard_drafts.yaml b/data/run-time/scorecard_drafts.yaml deleted file mode 100644 index 322c13f..0000000 --- a/data/run-time/scorecard_drafts.yaml +++ /dev/null @@ -1,21 +0,0 @@ -tag: 
public-api-test-draft-scorecard -name: Public API Test Draft Scorecard -description: Used to test Scorecard drafts with Cortex public API -draft: true -ladder: - name: Default Ladder - levels: - - name: Gold - rank: 1 - description: Gold tier - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("hasSecurityScans") != null - weight: 1 - level: Gold - filter: - category: SERVICE -filter: - query: 'entity.tag() == "user-profile-metadata-service"' - category: SERVICE diff --git a/data/run-time/test-workflows.json b/data/run-time/test-workflows.json new file mode 100644 index 0000000..d95f866 --- /dev/null +++ b/data/run-time/test-workflows.json @@ -0,0 +1,24 @@ +{ + "name": "Hello World", + "tag": "hello-world", + "description": "Simple workflow", + "isDraft": true, + "filter": { + "type": "GLOBAL" + }, + "runResponseTemplate": null, + "actions": [ + { + "name": "Send Message", + "slug": "send-message", + "schema": { + "channel": "my-slack-channel", + "message": "Hello, world.", + "type": "SLACK" + }, + "outgoingActions": [], + "isRootAction": true + } + ], + "runRestrictionPolicies": [] +} diff --git a/data/run-time/test_plugins.json b/data/run-time/test_plugins.json index c3195d1..aa30aae 100644 --- a/data/run-time/test_plugins.json +++ b/data/run-time/test_plugins.json @@ -14,7 +14,7 @@ "description": "Just testin' plugin uploads", "isDraft": false, "minimumRoleRequired": "VIEWER", - "name": "Public API Test Plugin", - "tag": "public-api-test-plugin" + "name": "CLI Test Plugin", + "tag": "cli-test-plugin" } diff --git a/data/run-time/test_plugins_invalid_role.json b/data/run-time/test_plugins_invalid_role.json index 45ed9a6..0fd6c0a 100644 --- a/data/run-time/test_plugins_invalid_role.json +++ b/data/run-time/test_plugins_invalid_role.json @@ -14,7 +14,7 @@ "description": "Just testing plugin permissions", "isDraft": true, "minimumRoleRequired": "ADMIN", - "name": "Public API Test Plugin Admin", - "tag": "public-api-test-plugin-admin" + "name": 
"CLI Test Plugin Admin", + "tag": "cli-test-plugin-admin" } diff --git a/data/run-time/test_plugins_manager.json b/data/run-time/test_plugins_manager.json index 68cd95b..35a4bbf 100644 --- a/data/run-time/test_plugins_manager.json +++ b/data/run-time/test_plugins_manager.json @@ -14,7 +14,7 @@ "description": "Just testing plugin permissions", "isDraft": true, "minimumRoleRequired": "MANAGER", - "name": "Public API Test Plugin Manager", - "tag": "public-api-test-plugin-manager" + "name": "CLI Test Plugin Manager", + "tag": "cli-test-plugin-manager" } diff --git a/data/run-time/update-entity-type-empty-schema.json b/data/run-time/update-entity-type-empty-schema.json deleted file mode 100644 index a18d0ae..0000000 --- a/data/run-time/update-entity-type-empty-schema.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "description": "This is a test resource definition.", - "name": "Public API Type With Empty Schema -- Update", - "schema": {} -} diff --git a/data/scorecards/dumb.yaml b/data/scorecards/dumb.yaml deleted file mode 100644 index f9132f8..0000000 --- a/data/scorecards/dumb.yaml +++ /dev/null @@ -1,28 +0,0 @@ -tag: dumb-test -name: Dumb test -description: test -draft: false -notifications: - enabled: true -ladder: - name: Default Ladder - levels: - - name: Bronze - rank: 1 - color: '#cd7f32' - - name: Silver - rank: 2 - color: '#c0c0c0' - - name: Gold - rank: 3 - color: '#ffd700' -rules: -- title: Has link to logs - description: test - expression: links("logs").length > 0 - weight: 1 - level: Gold - failureMessage: "Blah, blah, blah" -filter: - query: hasGroup("public-api-test") and entity.type() == "component" - category: RESOURCE diff --git a/data/scorecards/production-readiness.yaml b/data/scorecards/production-readiness.yaml deleted file mode 100644 index d30a437..0000000 --- a/data/scorecards/production-readiness.yaml +++ /dev/null @@ -1,28 +0,0 @@ -tag: public-api-test-production-readiness -name: Public API Test Production Readiness -description: test -draft: false 
-notifications: - enabled: true -ladder: - name: Default Ladder - levels: - - name: Bronze - rank: 1 - color: '#cd7f32' - - name: Silver - rank: 2 - color: '#c0c0c0' - - name: Gold - rank: 3 - color: '#ffd700' -rules: -- title: Has link to logs - description: test - expression: links("logs").length > 0 - weight: 1 - level: Gold - failureMessage: "Blah, blah, blah" -filter: - query: hasGroup("public-api-test") and entity.type() == "component" - category: RESOURCE diff --git a/poetry.lock b/poetry.lock index 4095299..f803089 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,189 +1,226 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "certifi" -version = "2024.7.4" +version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, ] [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file 
= "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = 
"charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", 
hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, +] + +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = 
"sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash 
= "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" -version = "7.5.4" +version = "7.8.2" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["test"] files = [ - {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, - {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, - {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, - {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, - {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, - {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash 
= "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, - {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, - {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, - {file = 
"coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, - {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, - {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, - {file = 
"coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, - {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, - {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, - {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, - {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, + {file = "coverage-7.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a"}, + {file = "coverage-7.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be"}, + {file = "coverage-7.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3"}, + {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6"}, + {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622"}, + {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c"}, + {file = 
"coverage-7.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3"}, + {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404"}, + {file = "coverage-7.8.2-cp310-cp310-win32.whl", hash = "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7"}, + {file = "coverage-7.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347"}, + {file = "coverage-7.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9"}, + {file = "coverage-7.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879"}, + {file = "coverage-7.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a"}, + {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5"}, + {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11"}, + {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a"}, + {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb"}, + {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54"}, + {file = "coverage-7.8.2-cp311-cp311-win32.whl", hash = 
"sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a"}, + {file = "coverage-7.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975"}, + {file = "coverage-7.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53"}, + {file = "coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c"}, + {file = "coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1"}, + {file = "coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279"}, + {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99"}, + {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20"}, + {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2"}, + {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57"}, + {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f"}, + {file = "coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8"}, + {file = "coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223"}, + {file = "coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = 
"sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f"}, + {file = "coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca"}, + {file = "coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d"}, + {file = "coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85"}, + {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257"}, + {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108"}, + {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0"}, + {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050"}, + {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48"}, + {file = "coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7"}, + {file = "coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3"}, + {file = "coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7"}, + {file = "coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008"}, + {file = 
"coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36"}, + {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46"}, + {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be"}, + {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740"}, + {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625"}, + {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b"}, + {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199"}, + {file = "coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8"}, + {file = "coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d"}, + {file = "coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b"}, + {file = "coverage-7.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a"}, + {file = "coverage-7.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d"}, + {file = "coverage-7.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca"}, + {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d"}, + {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787"}, + {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7"}, + {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3"}, + {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7"}, + {file = "coverage-7.8.2-cp39-cp39-win32.whl", hash = "sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a"}, + {file = "coverage-7.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e"}, + {file = "coverage-7.8.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837"}, + {file = "coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32"}, + {file = "coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27"}, ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "execnet" @@ -191,6 +228,7 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = 
"execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -201,68 +239,217 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" +groups = ["main", "dev"] files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" -version = "2.0.0" +version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false +python-versions = ">=3.8" +groups = ["dev", "test"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = 
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] [[package]] name = "packaging" -version = "24.1" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["dev", "test"] files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev", "test"] files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] + 
+[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "8.2.2" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev", "test"] files = [ - {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, - {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.5,<2.0" +pluggy = ">=1.5,<2" [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -273,6 +460,7 @@ version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, @@ -285,15 +473,55 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] +[[package]] +name = "pytest-html" +version = "4.1.1" +description = "pytest plugin for generating HTML reports" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_html-4.1.1-py3-none-any.whl", hash = "sha256:c8152cea03bd4e9bee6d525573b67bbc6622967b72b9628dda0ea3e2a0b5dd71"}, + {file = "pytest_html-4.1.1.tar.gz", hash = "sha256:70a01e8ae5800f4a074b56a4cb1025c8f4f9b038bba5fe31e3c98eb996686f07"}, +] + +[package.dependencies] +jinja2 = ">=3.0.0" +pytest = ">=7.0.0" +pytest-metadata = ">=2.0.0" + +[package.extras] +docs = ["pip-tools (>=6.13.0)"] +test = ["assertpy (>=1.1)", "beautifulsoup4 (>=4.11.1)", "black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "pytest-mock (>=3.7.0)", "pytest-rerunfailures (>=11.1.2)", "pytest-xdist (>=2.4.0)", "selenium (>=4.3.0)", "tox (>=3.24.5)"] + +[[package]] +name = "pytest-metadata" +version = "3.1.1" +description = "pytest plugin for test session metadata" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_metadata-3.1.1-py3-none-any.whl", hash = "sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b"}, + {file = "pytest_metadata-3.1.1.tar.gz", hash = "sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +test = ["black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "tox (>=3.24.5)"] + [[package]] name = "pytest-xdist" -version = "3.6.1" +version = "3.7.0" description = "pytest 
xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, - {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, + {file = "pytest_xdist-3.7.0-py3-none-any.whl", hash = "sha256:7d3fbd255998265052435eb9daa4e99b62e6fb9cfb6efd1f858d4d8c0c7f0ca0"}, + {file = "pytest_xdist-3.7.0.tar.gz", hash = "sha256:f9248c99a7c15b7d2f90715df93610353a485827bc06eefb6566d23f6400f126"}, ] [package.dependencies] @@ -307,78 +535,82 @@ testing = ["filelock"] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -388,13 +620,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "responses" -version = "0.25.3" +version = "0.25.7" description = "A utility library for mocking out the `requests` Python library." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, - {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, + {file = "responses-0.25.7-py3-none-any.whl", hash = "sha256:92ca17416c90fe6b35921f52179bff29332076bb32694c0df02dcac2c6bc043c"}, + {file = "responses-0.25.7.tar.gz", hash = "sha256:8ebae11405d7a5df79ab6fd54277f6f2bc29b2d002d0dd2d5c632594d1ddcedb"}, ] [package.dependencies] @@ -403,26 +636,88 @@ requests = ">=2.30.0,<3.0" urllib3 = ">=1.25.10,<3.0" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] + +[[package]] +name = "rich" +version = "14.0.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, + {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = 
"sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "typer" +version = "0.12.5" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"}, + {file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + +[[package]] +name = "typing-extensions" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.11" -content-hash = "351296b2fe9693563eb66b5c7ceb8d7b0e86cd3efb405418303dd923b7ed1bf7" +content-hash = "e84a00708240cc81582242e25f476b02a336519cda76c4311449d92e590c5898" diff --git a/pyproject.toml b/pyproject.toml index 6e3ed38..3f13bb3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,12 +17,14 @@ classifiers = [ [tool.poetry.dependencies] python = "^3.11" -requests = ">= 2.32.3, < 3" +requests = "^2.32.4" pyyaml = ">= 6.0.1, < 7" urllib3 = ">= 2.2.2" +typer = "^0.12.5" +click = "<8.2" [tool.poetry.scripts] -cortex = "cortexapps_cli.cortex:cli" +cortex = "cortexapps_cli.cli:app" [tool.poetry.group.test.dependencies] pytest = "^8.2.2" @@ -32,6 +34,7 @@ pytest-cov = "^5.0.0" [tool.poetry.group.dev.dependencies] pytest-xdist = "^3.6.1" responses = "^0.25.3" +pytest-html = "^4.1.1" [build-system] requires = ["poetry-core"] diff --git a/tests.orig/custom-events-configure.json b/tests.orig/custom-events-configure.json deleted file mode 100644 index c631b26..0000000 --- 
a/tests.orig/custom-events-configure.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "customData": { - "test3": 678, - "test4": "someVal3" - }, - "description": "Config event", - "timestamp": "2023-10-08T13:27:51.226Z", - "title": "config event", - "type": "CONFIG_SERVICE" -} diff --git a/tests.orig/custom-events.json b/tests.orig/custom-events.json deleted file mode 100644 index dfd1948..0000000 --- a/tests.orig/custom-events.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "customData": { - "test1": 345, - "test2": "someVal2" - }, - "description": "Validate event", - "timestamp": "2023-10-10T13:27:51.226Z", - "title": "validate event", - "type": "VALIDATE_SERVICE" -} diff --git a/tests.orig/data/catalog/cli-test-service-with-groups.yaml b/tests.orig/data/catalog/cli-test-service-with-groups.yaml deleted file mode 100644 index 1d31ec3..0000000 --- a/tests.orig/data/catalog/cli-test-service-with-groups.yaml +++ /dev/null @@ -1,13 +0,0 @@ -openapi: 3.0.1 -info: - title: CLI Test Service With Groups - x-cortex-tag: cli-test-service-with-groups - x-cortex-type: service - x-cortex-groups: - - corona-spokesperson - x-cortex-owners: - - name: test-team-1 - type: GROUP - provider: CORTEX - x-cortex-custom-metadata: - foo: bar diff --git a/tests.orig/data/catalog/cli-test-service.yaml b/tests.orig/data/catalog/cli-test-service.yaml deleted file mode 100644 index 63d87a3..0000000 --- a/tests.orig/data/catalog/cli-test-service.yaml +++ /dev/null @@ -1,71 +0,0 @@ -openapi: 3.0.1 -info: - title: CLI Test Service - x-cortex-git: - github: - repository: snoop-dogg/woof - x-cortex-tag: cli-test-service - x-cortex-type: service - x-cortex-owners: - - type: EMAIL - email: snoop.dogg@cortex.io - x-cortex-custom-metadata: - foo: bar - testField: abc -paths: - /: - get: - summary: List API versions - operationId: listVersionsv2 - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: 
http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z - put: - summary: Set version - operationId: SetVersions - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z - /2.0/users/{username}: - get: - summary: List API versions - operationId: listVersionsv2 - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z diff --git a/tests.orig/data/catalog/test-service.yaml b/tests.orig/data/catalog/test-service.yaml deleted file mode 100644 index 16d10df..0000000 --- a/tests.orig/data/catalog/test-service.yaml +++ /dev/null @@ -1,83 +0,0 @@ -openapi: 3.0.1 -info: - title: Test Service - x-cortex-git: - github: - repository: snoop-dogg/woof - x-cortex-tag: test-service - x-cortex-type: service - x-cortex-groups: - - corona-spokesperson - x-cortex-owners: - - type: EMAIL - email: snoop.dogg@cortex.io - - name: test-team-1 - type: GROUP - provider: CORTEX - x-cortex-custom-metadata: - foo: bar - testField: 123 - x-cortex-link: - - name: Cortex Homepage - type: DOCUMENTATION - url: https://cortex.io - - name: Cortex Documentation - type: DOCUMENTATION - url: https://docs.cortex.io -paths: - /: - get: - summary: List API versions - operationId: listVersionsv2 - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z - put: - summary: Set version - operationId: SetVersions - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: 
- - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z - /2.0/users/{username}: - get: - summary: List API versions - operationId: listVersionsv2 - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z diff --git a/tests.orig/data/catalog/test-team-1.yaml b/tests.orig/data/catalog/test-team-1.yaml deleted file mode 100644 index 99b6d47..0000000 --- a/tests.orig/data/catalog/test-team-1.yaml +++ /dev/null @@ -1,10 +0,0 @@ -openapi: 3.0.1 -info: - title: Test Team 1 - x-cortex-tag: test-team-1 - x-cortex-team: - members: - - name: Jeff Schnitter - email: jeff.schnitter@cortex.io - notificationsEnabled: true - x-cortex-type: team diff --git a/tests.orig/data/catalog/test-team-2.yaml b/tests.orig/data/catalog/test-team-2.yaml deleted file mode 100644 index 2fe824e..0000000 --- a/tests.orig/data/catalog/test-team-2.yaml +++ /dev/null @@ -1,10 +0,0 @@ -openapi: 3.0.1 -info: - title: Test Team 2 - x-cortex-tag: test-team-2 - x-cortex-team: - members: - - name: Joe Montana - email: joe.montana@cortex.io - notificationsEnabled: true - x-cortex-type: team diff --git a/tests.orig/data/catalog/test_dependencies_dependency_service.yaml b/tests.orig/data/catalog/test_dependencies_dependency_service.yaml deleted file mode 100644 index 27780ee..0000000 --- a/tests.orig/data/catalog/test_dependencies_dependency_service.yaml +++ /dev/null @@ -1,13 +0,0 @@ -openapi: 3.0.1 -info: - title: Dependency Service - x-cortex-tag: dependency-service - x-cortex-type: service - x-cortex-owners: - - type: EMAIL - email: richard.lafleur@cortex.io - x-cortex-dependency: - - tag: cli-test-service - path: "/2.0/users/{username}" - method: GET - description: ensure user has payment information configured diff --git a/tests.orig/data/json/resource-definitions.json 
b/tests.orig/data/json/resource-definitions.json deleted file mode 100644 index d86ddeb..0000000 --- a/tests.orig/data/json/resource-definitions.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "definitions": [ - { - "type": "test-resource-definition", - "source": "CUSTOM", - "name": "Test Resource Defintion -- Update", - "description": "This is a test resource definition.", - "schema": { - "type": "object" - } - } - ] -} diff --git a/tests.orig/data/json/resource_definitions.json b/tests.orig/data/json/resource_definitions.json deleted file mode 100644 index e69de29..0000000 diff --git a/tests.orig/data/json/test-resource-definition.json b/tests.orig/data/json/test-resource-definition.json deleted file mode 100644 index 6ca53c7..0000000 --- a/tests.orig/data/json/test-resource-definition.json +++ /dev/null @@ -1 +0,0 @@ -{"type":"test-resource-definition","source":"CUSTOM","name":"Test Resource Defintion -- Update","description":"This is a test resource definition.","schema":{"type":"object"}} diff --git a/tests.orig/data/resource-definitions/test-resource-definition.json b/tests.orig/data/resource-definitions/test-resource-definition.json deleted file mode 100644 index d956988..0000000 --- a/tests.orig/data/resource-definitions/test-resource-definition.json +++ /dev/null @@ -1 +0,0 @@ -{"type":"test-resource-definition","source":"CUSTOM","name":"Test Resource Definition","description":"This is a test resource definition.","schema":{"type":"object"}} diff --git a/tests.orig/data/scorecards/test-scorecard-draft.yaml b/tests.orig/data/scorecards/test-scorecard-draft.yaml deleted file mode 100644 index c16e9c1..0000000 --- a/tests.orig/data/scorecards/test-scorecard-draft.yaml +++ /dev/null @@ -1,19 +0,0 @@ -tag: test-scorecard-draft -name: Test Scorecard Draft -description: Used to test Cortex CLI -draft: true -ladder: - name: Default Ladder - levels: - - name: You Made It - rank: 1 - description: "\"If you ain't first, you're last. 
-- Ricky Bobby\" -- Scott Mullin" - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("testField") != null - weight: 1 - level: You Made It -filter: - query: entity_descriptor.info.`x-cortex-tag` = "cli-test-service" - category: SERVICE diff --git a/tests.orig/data/scorecards/test-scorecard.yaml b/tests.orig/data/scorecards/test-scorecard.yaml deleted file mode 100644 index b2c11bc..0000000 --- a/tests.orig/data/scorecards/test-scorecard.yaml +++ /dev/null @@ -1,19 +0,0 @@ -tag: test-scorecard -name: Test Scorecard -description: Used to test Cortex CLI -draft: false -ladder: - name: Default Ladder - levels: - - name: You Made It - rank: 1 - description: "\"If you ain't first, you're last. -- Ricky Bobby\" -- Scott Mullin" - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("testField") != null - weight: 1 - level: You Made It -filter: - query: entity_descriptor.info.`x-cortex-tag` = "cli-test-service" - category: SERVICE diff --git a/tests.orig/dependency-update.json b/tests.orig/dependency-update.json deleted file mode 100644 index f4ff253..0000000 --- a/tests.orig/dependency-update.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "description": "This is the updated description of the dependency", - "metadata": { - "newField": "newField data", - "newField1": "newField1 data" - } -} diff --git a/tests.orig/test-custom-data-array.json b/tests.orig/test-custom-data-array.json deleted file mode 100644 index b69dbc1..0000000 --- a/tests.orig/test-custom-data-array.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "values": { - "test-service": [ - { - "key": "checklist", - "value": { - "ada": "yes", - "pii": "n/a", - "itar": "no", - "date": "Fri Aug 25 2023", - "signed_by": "Jeff Schnitter" - } - } - ] - } -} diff --git a/tests.orig/test-custom-data-bulk.json b/tests.orig/test-custom-data-bulk.json deleted file mode 100644 index be5cb7f..0000000 --- a/tests.orig/test-custom-data-bulk.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "values": { - 
"test-service": [ - { - "key": "bulk-key-1", - "value": "key-1" - }, - { - "key": "bulk-key-2", - "value": "key-2" - } - ], - "test-service": [ - { - "key": "bulk-key-3", - "value": "key-3" - }, - { - "key": "bulk-key-4", - "value": "key-4" - } - ] - } -} diff --git a/tests.orig/test-custom-data.json b/tests.orig/test-custom-data.json deleted file mode 100644 index d2d5bdb..0000000 --- a/tests.orig/test-custom-data.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "description": "custom data field description", - "key": "anotherTestField", - "value": "123" -} diff --git a/tests.orig/test-resource-definition-update.json b/tests.orig/test-resource-definition-update.json deleted file mode 100644 index d5438ca..0000000 --- a/tests.orig/test-resource-definition-update.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "description": "This is a test resource definition.", - "name": "Test Resource Defintion -- Update", - "schema": {} -} diff --git a/tests.orig/test-resource-definition.json b/tests.orig/test-resource-definition.json deleted file mode 100644 index 360c21d..0000000 --- a/tests.orig/test-resource-definition.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "description": "This is a test resource definition.", - "name": "Test Resource Defintion", - "schema": {}, - "type": "test-resource-definition" -} diff --git a/tests.orig/test_audit_logs.py b/tests.orig/test_audit_logs.py deleted file mode 100644 index 1acb14e..0000000 --- a/tests.orig/test_audit_logs.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -Tests for the audit-logs commands. 
-""" -from cortexapps_cli.cortex import cli -from datetime import datetime, timedelta, timezone -import json -import sys -import pytest - -def test_audit_logs_get(): - cli(["audit-logs", "get"]) - -def test_audit_logs_page_size(capsys): - cli(["audit-logs", "get", "-p", "1", "-z", "5"]) - out, err = capsys.readouterr() - out = json.loads(out) - assert len(out['logs']) == 5, "Changing page size should return requested amount of entries" - -def test_audit_logs_with_start_and_end(capsys): - now = datetime.now() - yesterday = now - timedelta(days=1) - cli(["audit-logs", "get", "-e", now.isoformat(), "-s", yesterday.isoformat()]) - -def test_audit_logs_with_start(): - now = datetime.now() - yesterday = now - timedelta(days=1) - cli(["audit-logs", "get", "-s", yesterday.isoformat()]) - -def test_audit_logs_with_end(): - now = datetime.now() - yesterday = now - timedelta(days=1) - cli(["audit-logs", "get", "-e", yesterday.isoformat()]) diff --git a/tests.orig/test_backup.py b/tests.orig/test_backup.py deleted file mode 100644 index 37da787..0000000 --- a/tests.orig/test_backup.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -Tests for backup commands. 
-""" -from cortexapps_cli.cortex import cli - -import pytest -import sys - -def test_import(capsys): - cli(["backup", "import", "-d", "tests/test_backup_export"]) - diff --git a/tests.orig/test_backup_export/catalog/test-service-import-2.yaml b/tests.orig/test_backup_export/catalog/test-service-import-2.yaml deleted file mode 100644 index 3835c8d..0000000 --- a/tests.orig/test_backup_export/catalog/test-service-import-2.yaml +++ /dev/null @@ -1,5 +0,0 @@ -openapi: 3.0.1 -info: - title: Test Service Import 2 - x-cortex-tag: test-service-import-2 - x-cortex-type: service diff --git a/tests.orig/test_backup_export/catalog/test-service-import.yaml b/tests.orig/test_backup_export/catalog/test-service-import.yaml deleted file mode 100644 index 06b4494..0000000 --- a/tests.orig/test_backup_export/catalog/test-service-import.yaml +++ /dev/null @@ -1,5 +0,0 @@ -openapi: 3.0.1 -info: - title: CLI Test Service Import - x-cortex-tag: cli-test-service-import - x-cortex-type: service diff --git a/tests.orig/test_backup_export/json/ip-allowlist.json b/tests.orig/test_backup_export/json/ip-allowlist.json deleted file mode 100644 index 6a480c6..0000000 --- a/tests.orig/test_backup_export/json/ip-allowlist.json +++ /dev/null @@ -1 +0,0 @@ -{"entries":[]} diff --git a/tests.orig/test_backup_export/resource-definitions/test-resource-definition.json b/tests.orig/test_backup_export/resource-definitions/test-resource-definition.json deleted file mode 100644 index d956988..0000000 --- a/tests.orig/test_backup_export/resource-definitions/test-resource-definition.json +++ /dev/null @@ -1 +0,0 @@ -{"type":"test-resource-definition","source":"CUSTOM","name":"Test Resource Definition","description":"This is a test resource definition.","schema":{"type":"object"}} diff --git a/tests.orig/test_backup_export/scorecards/test-scorecard.yaml b/tests.orig/test_backup_export/scorecards/test-scorecard.yaml deleted file mode 100644 index 6db0c32..0000000 --- 
a/tests.orig/test_backup_export/scorecards/test-scorecard.yaml +++ /dev/null @@ -1,19 +0,0 @@ -tag: test-scorecard-import -name: Test Scorecard Import -description: Used to test Cortex CLI -draft: false -ladder: - name: Default Ladder - levels: - - name: You Made It - rank: 1 - description: "\"If you ain't first, you're last. -- Ricky Bobby\" -- Scott Mullin" - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("testField") != null - weight: 1 - level: You Made It -filter: - query: entity_descriptor.info.`x-cortex-tag` = "test-service" - category: SERVICE diff --git a/tests.orig/test_backup_export/teams/test-team-3.json b/tests.orig/test_backup_export/teams/test-team-3.json deleted file mode 100644 index 7bbff10..0000000 --- a/tests.orig/test_backup_export/teams/test-team-3.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "teamTag": "test-team-3", - "metadata": { - "name": "Test Team 3", - "description": null, - "summary": null - }, - "links": [], - "slackChannels": [], - "additionalMembers": [], - "isArchived": false, - "cortexTeam": { - "members": [ - { - "email": "jeff.schnitter@cortex.io", - "name": "Jeff Schnitter", - "description": "", - "role": null - } - ] - }, - "type": "CORTEX" -} diff --git a/tests.orig/test_catalog-invalid-service.yaml b/tests.orig/test_catalog-invalid-service.yaml deleted file mode 100644 index ab87147..0000000 --- a/tests.orig/test_catalog-invalid-service.yaml +++ /dev/null @@ -1,8 +0,0 @@ -openapi: 3.0.1 -info: - title: Invalid Service to test dryrun - x-cortex-bag: invalid-service - x-cortex-type: service - x-cortex-owners: - - type: EMAIL - email: snoop.dogg@cortex.io diff --git a/tests.orig/test_catalog.py b/tests.orig/test_catalog.py deleted file mode 100644 index 7f781ee..0000000 --- a/tests.orig/test_catalog.py +++ /dev/null @@ -1,107 +0,0 @@ -""" -Tests for the catalog methods. 
-""" - -from cortexapps_cli.cortex import cli -import json -import pytest -import sys - -def test_catalog_create_service(capsys): - cli(["catalog", "create", "-f", "tests/test_catalog_create_service.yaml"]) - -def test_retrieve_service(capsys): - cli(["catalog", "descriptor", "-y", "-t", "cli-test-service"]) - -def test_dryrun(capsys): - with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "create", "-d", "-f", "tests/test_catalog-invalid-service.yaml"]) - out, err = capsys.readouterr() - assert json.loads(out)['type'] == "BAD_REQUEST" - -def test_details(capsys): - cli(["catalog", "details", "-t", "cli-test-service"]) - -def test_details_hierarchy_fields(capsys): - cli(["catalog", "details", "-i", "groups", "-t", "cli-test-service"]) - -def test_list(capsys): - cli(["catalog", "list"]) - -def test_list_page(capsys): - cli(["catalog", "list", "-p", "0"]) - -def test_list_page_size(capsys): - cli(["catalog", "list", "-z", "100"]) - -def test_list_include_hierarchy_fields(capsys): - cli(["catalog", "list", "-i", "groups", "-z", "50"]) - -def test_list_page_and_page_size(capsys): - cli(["catalog", "list", "-p", "0", "-z", "2"]) - out, err = capsys.readouterr() - assert len(json.loads(out)['entities']) == 2 - -def test_list_with_parms(capsys): - cli(["catalog", "list", "-g", "corona-spokesperson", "-d", "1", "-t", "service", "-a", "-m" ]) - out, err = capsys.readouterr() - out = json.loads(out) - assert any(service['tag'] == 'cli-test-service-with-groups' for service in out['entities']) - assert not(out['entities'][0]['metadata'][0]["key"] is None), "Custom metadata should have been in result" - -# Archiving a service can impact it from being seen by other operations. 
Should probably be done with a separate -# service -def test_archive(): - cli(["catalog", "archive", "-t", "cli-test-service"]) - cli(["catalog", "unarchive", "-t", "cli-test-service"]) - -def test_list_by_team(capsys): - cli(["catalog", "list", "-o", "test-team-1" ]) - out, err = capsys.readouterr() - out = json.loads(out) - assert any(service['tag'] == 'cli-test-service-with-groups' for service in out['entities']) - -def test_list_with_owners(capsys): - cli(["catalog", "list", "-l", "-io", "-g", "corona-spokesperson"]) - out, err = capsys.readouterr() - out = json.loads(out) - - found_service = False - for entity in out['entities']: - if entity['tag'] == "test-service": - assert len(entity['links']) > 0 - assert len(entity['owners']) > 0 - found_service = True - - assert found_service - -def test_list_descriptors(capsys, tmp_path): - cli(["catalog", "list-descriptors", "-z", "1", "-p", "0", "-y"]) - out, err = capsys.readouterr() - out = json.loads(out) - - f = tmp_path / "descriptor.yaml" - f.write_text(out["descriptors"][0]) - - # Should be able to have a dryrun validate the yaml - cli(["catalog", "create", "-d", "-f", str(f)]) - -# Since gitops not set up for this service, it should return "Not Found". -# Kind of a cheap way out for this test, but it does validate the metod -# was accepted and returnd a value. -def test_gitops_logs(capsys): - # Must be raised as exception, because of the expected 404 status code. - with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "gitops-logs", "-t", "test-service"]) - out, err = capsys.readouterr() - - assert out == "Not Found" - assert excinfo.value.code == 404 - -# Not checking any output because we cannot guarantee scorecards have -# been evaluated. -# -# Can change this in the future when there is a way to ensure that a -# scorecard has been evaluated. 
-def test_scorecard_scores(capsys): - cli(["catalog", "scorecard-scores", "-t", "test-service"]) diff --git a/tests.orig/test_catalog_create_service.yaml b/tests.orig/test_catalog_create_service.yaml deleted file mode 100644 index bd3d1c5..0000000 --- a/tests.orig/test_catalog_create_service.yaml +++ /dev/null @@ -1,5 +0,0 @@ -openapi: 3.0.1 -info: - title: CLI Test Create Service - x-cortex-tag: cli-test-create - x-cortex-type: service diff --git a/tests.orig/test_command_line_opts.py b/tests.orig/test_command_line_opts.py deleted file mode 100644 index 78d2e6b..0000000 --- a/tests.orig/test_command_line_opts.py +++ /dev/null @@ -1,33 +0,0 @@ -""" -Tests for version commands. -""" -from cortexapps_cli.cortex import cli -import pytest - -def test_version(): - with pytest.raises(SystemExit) as excinfo: - cli(["-v"]) - -def test_help(): - with pytest.raises(SystemExit) as excinfo: - cli(["-h"]) - -def test_no_parms(): - with pytest.raises(SystemExit) as excinfo: - cli([]) - -def test_integrations_no_parms(): - with pytest.raises(SystemExit) as excinfo: - cli(["integrations"]) - -def test_integrations_help(): - with pytest.raises(SystemExit) as excinfo: - cli(["integrations", "-h"]) - -def test_integrations_command(): - with pytest.raises(SystemExit) as excinfo: - cli(["integrations", "aws"]) - -def test_command_no_options(): - with pytest.raises(SystemExit) as excinfo: - cli(["catalog"]) diff --git a/tests.orig/test_config_file.py b/tests.orig/test_config_file.py deleted file mode 100644 index 64bcd31..0000000 --- a/tests.orig/test_config_file.py +++ /dev/null @@ -1,98 +0,0 @@ -""" -Tests for the cortex CLI config file -""" - -# These tests are all marked to run in serial order because they make modifications to the -# cortex config file and/or CORTEX_API_KEY value and would potentially impact other tests -# that are running in parallel (with poetry run pytest -n auto), so they are run separately. 
- -# Additionally, order is VERY IMPORTANT in this file because of the way CORTEX_API key is -# deleted, set to invalid values, etc. Moving test order could impact the overall success -# of pytest. Tread carefully here. -from cortexapps_cli.cortex import cli - -import io -import os -import pytest -import sys -from string import Template - -# Requires user input, so use monkeypatch to set it. -@pytest.fixture(scope="session") -def delete_cortex_api_key(): - if "CORTEX_API_KEY" in os.environ: - del os.environ['CORTEX_API_KEY'] - -@pytest.mark.serial -def test_config_file_api_key_quotes(tmp_path): - cortex_api_key = os.getenv('CORTEX_API_KEY') - f = tmp_path / "cortex_config_api_key_quotes" - template = Template(""" - [default] - api_key = "${cortex_api_key}" - """) - content = template.substitute(cortex_api_key=cortex_api_key) - print(content) - f.write_text(content) - cli(["-c", str(f), "teams", "list"]) - -@pytest.mark.serial -def test_environment_variables(capsys): - cli(["teams", "list"]) - out, err = capsys.readouterr() - #print(out) - print("ERR = " + err) - assert err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY", "Warning should be displayed by default" - - cli(["-q", "teams", "list"]) - out, err = capsys.readouterr() - assert not(err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY"), "Warning should be displayed with -q option" - -@pytest.mark.serial -def test_config_file_create(monkeypatch, tmp_path, delete_cortex_api_key): - with pytest.raises(SystemExit) as excinfo: - monkeypatch.setattr('sys.stdin', io.StringIO('Y')) - f = tmp_path / "test-config.txt" - cli(["-c", str(f), "catalog", "list"]) - -@pytest.mark.serial -def test_config_file_new(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config" - content = """ - [default] - api_key = REPLACE_WITH_YOUR_CORTEX_API_KEY - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "teams", "list"]) - 
out, err = capsys.readouterr() - -@pytest.mark.serial -def test_export(capsys, delete_cortex_api_key): - cli(["-t", "rich-sandbox", "backup", "export"]) - out, err = capsys.readouterr() - last_line = out.strip().split("\n")[-1] - sys.stdout.write(out + "\n\n") - sys.stdout.write(last_line + "\n\n") - assert "rich-sandbox" in out - -@pytest.mark.serial -def test_config_file_bad_api_key(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config_bad_api_key" - content = """ - [default] - api_key = invalidApiKey - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "teams", "list"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" - -@pytest.mark.serial -def test_environment_variable_invalid_key(capsys): - with pytest.raises(SystemExit) as excinfo: - os.environ["CORTEX_API_KEY"] = "invalidKey" - cli(["teams", "list"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" diff --git a/tests.orig/test_custom_data.py b/tests.orig/test_custom_data.py deleted file mode 100644 index a36a025..0000000 --- a/tests.orig/test_custom_data.py +++ /dev/null @@ -1,18 +0,0 @@ -""" -Tests for custom-data commands. 
-""" -from cortexapps_cli.cortex import cli -import json - -def test_custom_data(): - cli(["custom-data", "add", "-t", "test-service", "-f", "tests/test-custom-data.json"]) - cli(["custom-data", "list", "-t", "test-service"]) - -def test_custom_data_bulk(): - cli(["custom-data", "bulk", "-f", "tests/test-custom-data-bulk.json"]) - -def test_custom_data_bulk_array(): - cli(["custom-data", "bulk", "-f", "tests/test-custom-data-array.json"]) - -def test_custom_data_get(): - cli(["custom-data", "get", "-t", "test-service", "-k", "foo"]) diff --git a/tests.orig/test_custom_events.py b/tests.orig/test_custom_events.py deleted file mode 100644 index f81c817..0000000 --- a/tests.orig/test_custom_events.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Tests for custom-events commands. -""" -from cortexapps_cli.cortex import cli -import json -import time - -def test_custom_events_create(capsys): - cli(["custom-events", "create", "-t", "test-service", "-f", "tests/custom-events.json"]) - cli(["custom-events", "list", "-t", "test-service"]) - cli(["custom-events", "list", "-t", "test-service", "-y", "VALIDATE_SERVICE"]) - cli(["custom-events", "list", "-t", "test-service", "-y", "VALIDATE_SERVICE", "-i", "2023-10-10T13:27:51.226"]) - - -def test_custom_event_uuid(capsys): - cli(["custom-events", "create", "-t", "test-service", "-f", "tests/custom-events-configure.json"]) - out, err = capsys.readouterr() - out = json.loads(out) - uuid = out['uuid'] - cli(["custom-events", "get-by-uuid", "-t", "test-service", "-u", uuid]) - cli(["custom-events", "update-by-uuid", "-t", "test-service", "-u", uuid, "-f", "tests/custom-events.json"]) - cli(["custom-events", "delete-by-uuid", "-t", "test-service", "-u", uuid]) - cli(["custom-events", "delete-all", "-t", "test-service"]) - diff --git a/tests.orig/test_dependencies.json b/tests.orig/test_dependencies.json deleted file mode 100644 index 5cd19e1..0000000 --- a/tests.orig/test_dependencies.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "description": 
"This is a description of the dependency", - "metadata": { - "someField": "someField data", - "someField1": "someField1 data" - } -} diff --git a/tests.orig/test_dependencies.py b/tests.orig/test_dependencies.py deleted file mode 100644 index af727d1..0000000 --- a/tests.orig/test_dependencies.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Tests for teams commands. -""" -from cortexapps_cli.cortex import cli - -def test_dependencies(capsys): - cli(["dependencies", "delete-all", "-r", "dependency-service"]) - cli(["dependencies", "add", "-r", "dependency-service", "-e", - "test-service", "-m", "GET", "-p", "/2.0/users/{username}", "-f", "tests/test_dependencies.json"]) - - cli(["dependencies", "delete", "-r", "dependency-service", "-e", "test-service", "-m", "GET", "-p", "/2.0/users/{username}"]) - - cli(["dependencies", "add-in-bulk", "-f", "tests/test_dependencies_bulk.json"]) - - cli(["dependencies", "get", "-r", "dependency-service", "-e", "test-service", "-m", "GET", "-p", "/2.0/users/{username}"]) - - cli(["dependencies", "get-all", "-r", "dependency-service", "-o"]) - - cli(["dependencies", "update", "-r", "dependency-service", "-e", "test-service", "-m", "GET", "-p", "/2.0/users/{username}", "-f", "tests/test_dependencies_update.json"]) - - cli(["dependencies", "add-in-bulk", "-f", "tests/test_dependencies_bulk.json"]) - cli(["dependencies", "delete-in-bulk", "-f", "tests/test_dependencies_bulk.json"]) - cli(["dependencies", "delete-all", "-r", "dependency-service"]) diff --git a/tests.orig/test_dependencies_bulk.json b/tests.orig/test_dependencies_bulk.json deleted file mode 100644 index 5e1c65e..0000000 --- a/tests.orig/test_dependencies_bulk.json +++ /dev/null @@ -1,16 +0,0 @@ -{ -"values": { - "dependency-service": [ - { - "description": "dependency descr", - "metadata": { - "someField": "someField data", - "someField1": "someField1 data" - }, - "method": "GET", - "path": "/2.0/users/{username}", - "tag": "test-service" - } - ] -} -} diff --git 
a/tests.orig/test_dependencies_update.json b/tests.orig/test_dependencies_update.json deleted file mode 100644 index f4ff253..0000000 --- a/tests.orig/test_dependencies_update.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "description": "This is the updated description of the dependency", - "metadata": { - "newField": "newField data", - "newField1": "newField1 data" - } -} diff --git a/tests.orig/test_deploys.json b/tests.orig/test_deploys.json deleted file mode 100644 index e934c4d..0000000 --- a/tests.orig/test_deploys.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "customData": { - "test-field-1": "value1" - }, - "deployer": { - "email": "test-user@example.com", - "name": "Test Deployer" - }, - "environment": "testEnv", - "sha": "SHA-123456", - "timestamp": "2023-08-16T22:55:38.284Z", - "title": "deploy-001", - "type": "DEPLOY" -} diff --git a/tests.orig/test_deploys.py b/tests.orig/test_deploys.py deleted file mode 100644 index b418eb6..0000000 --- a/tests.orig/test_deploys.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -Tests for deploys commands. -""" -from cortexapps_cli.cortex import cli -import json - -def _add_deploy(): - cli(["deploys", "add", "-t", "cli-test-service", "-f", "tests/test_deploys.json"]) - -def test_deploys(capsys): - # This has to be the first call to the cli because we want to capture the output and capsys - # captures output collectively. 
- cli(["deploys", "add", "-t", "cli-test-service", "-f", "tests/test_deploys_uuid.json"]) - out, err = capsys.readouterr() - out = json.loads(out) - uuid = out['uuid'] - - cli(["-d", "deploys", "update-by-uuid", "-t", "cli-test-service", "-u", uuid, "-f", "tests/test_deploys_update.json"]) - - cli(["deploys", "delete-by-uuid", "-t", "cli-test-service", "-u", uuid]) - - _add_deploy() - - cli(["deploys", "list", "-t", "cli-test-service"]) - - cli(["deploys", "delete", "-t", "cli-test-service", "-s", "SHA-123456"]) - - _add_deploy() - cli(["deploys", "delete-filter", "-y", "DEPLOY"]) - - _add_deploy() - cli(["deploys", "delete-all"]) diff --git a/tests.orig/test_deploys_update.json b/tests.orig/test_deploys_update.json deleted file mode 100644 index 92e8e58..0000000 --- a/tests.orig/test_deploys_update.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "customData": { - "test-field-1": "value1" - }, - "deployer": { - "email": "test-user@example.com", - "name": "Test Deployer" - }, - "environment": "testEnv", - "sha": "SHA-456789", - "timestamp": "2023-11-29T22:55:38.284Z", - "title": "deploy-001", - "type": "DEPLOY" -} diff --git a/tests.orig/test_deploys_uuid.json b/tests.orig/test_deploys_uuid.json deleted file mode 100644 index fa6d6df..0000000 --- a/tests.orig/test_deploys_uuid.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "customData": { - "test-field-2": "value2" - }, - "deployer": { - "email": "test-user@example.com", - "name": "Test Deployer" - }, - "environment": "testEnv", - "sha": "SHA-999999", - "timestamp": "2023-11-29T16:29:38.284Z", - "title": "deploy-002", - "type": "DEPLOY" -} diff --git a/tests.orig/test_discovery_audit.py b/tests.orig/test_discovery_audit.py deleted file mode 100644 index 6c4993e..0000000 --- a/tests.orig/test_discovery_audit.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Tests for the discovery-audit commands. 
-""" -from cortexapps_cli.cortex import cli - -def test_discovery_audit_get(): - cli(["discovery-audit", "get"]) - -def test_discovery_audit_get_include_ignored(): - cli(["discovery-audit", "get", "-i"]) - -def test_discovery_audit_filter_on_source(): - cli(["discovery-audit", "get", "-s", "GITHUB"]) - -def test_discovery_audit_filter_on_type(): - cli(["discovery-audit", "get", "-t", "NEW_REPOSITORY"]) - diff --git a/tests.orig/test_docs.py b/tests.orig/test_docs.py deleted file mode 100644 index f4ff257..0000000 --- a/tests.orig/test_docs.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -Tests for docs commands. -""" -from cortexapps_cli.cortex import cli - -def test_docs(): - cli(["docs", "update", "-t", "cli-test-service", "-f", "tests/test_docs.yaml"]) - - cli(["docs", "get", "-t", "cli-test-service"]) - - cli(["docs", "delete", "-t", "cli-test-service"]) diff --git a/tests.orig/test_docs.yaml b/tests.orig/test_docs.yaml deleted file mode 100644 index 053daca..0000000 --- a/tests.orig/test_docs.yaml +++ /dev/null @@ -1,42 +0,0 @@ -openapi: 3.0.0 -info: - title: Simple API overview - version: 2.0.0 -paths: - /: - get: - operationId: listVersionsv2 - summary: List API versions with 'full' history - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - status: CURRENT - updated: "2011-01-21T11:33:21Z" - id: v2.0 - links: - - href: http://127.0.0.1:8774/v2/ - rel: self - put: - operationId: SetVersions - summary: Set version - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - status: CURRENT - updated: "2011-01-21T11:33:21Z" - id: v2.0 - links: - - href: http://127.0.0.1:8774/v2/ - rel: self diff --git a/tests.orig/test_gitops_logs.py b/tests.orig/test_gitops_logs.py deleted file mode 100644 index e4d4be0..0000000 --- a/tests.orig/test_gitops_logs.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Tests for the gitops-logs commands. 
-""" -from cortexapps_cli.cortex import cli -from datetime import datetime, timedelta, timezone -import json -import sys -import pytest - -def test_gitops_logs_get(): - cli(["gitops-logs", "get"]) - -def test_gitops_logs_page_size(capsys): - cli(["-d", "gitops-logs", "get", "-p", "1", "-z", "5"]) - out, err = capsys.readouterr() - out = json.loads(out) - assert len(out['logs']) == 5, "Changing page size should return requested amount of entries" diff --git a/tests.orig/test_groups.py b/tests.orig/test_groups.py deleted file mode 100644 index d835584..0000000 --- a/tests.orig/test_groups.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -Tests for groups commands. -""" -from cortexapps_cli.cortex import cli - -def test_groups_add(): - cli(["groups", "add", "-t", "test-service", "-f", "tests/test-groups.json"]) - -def test_groups_get(): - cli(["groups", "get", "-t", "test-service"]) - -def test_groups_delete(): - cli(["groups", "delete", "-t", "test-service", "-f", "tests/test-groups.json"]) - cli(["groups", "get", "-t", "test-service"]) diff --git a/tests.orig/test_integrations_aws_config.json b/tests.orig/test_integrations_aws_config.json deleted file mode 100644 index 437cacb..0000000 --- a/tests.orig/test_integrations_aws_config.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "configurations": [ - { - "accountId": "123456", - "role": "my-new-role" - }, - { - "accountId": "284224559761", - "role": "Test-Role-Jeff" - } - ] -} diff --git a/tests.orig/test_integrations_coralogix.py b/tests.orig/test_integrations_coralogix.py deleted file mode 100644 index 31ac364..0000000 --- a/tests.orig/test_integrations_coralogix.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Tests for coralogix integration commands. 
-""" -from cortexapps_cli.cortex import cli -from string import Template -import json -import os -import pytest -import responses - -coralogix_api_key = json.dumps("fakeKey") - -def _coralogix_input(tmp_path): - f = tmp_path / "test_integrations_coralogix_add.json" - template = Template(""" - { - "alias": "test", - "apiKey": ${coralogix_api_key}, - "isDefault": true, - "region": "US1" - } - """) - content = template.substitute(coralogix_api_key=coralogix_api_key) - f.write_text(content) - return f - -def test_integrations_coralogix_add(tmp_path): - f = _coralogix_input(tmp_path) - - cli(["integrations", "coralogix", "delete-all"]) - cli(["integrations", "coralogix", "add", "-f", str(f)]) - cli(["integrations", "coralogix", "get", "-a", "test"]) - cli(["integrations", "coralogix", "get-all"]) - cli(["integrations", "coralogix", "get-default"]) - - cli(["integrations", "coralogix", "update", "-a", "test", "-f", str(f)]) - cli(["integrations", "coralogix", "delete", "-a", "test"]) - - f = tmp_path / "test_integrations_coralogix_update_multiple.json" - template = Template(""" - { - "configurations": [ - { - "alias": "test", - "apiKey": ${coralogix_api_key}, - "isDefault": true, - "region": "US1" - }, - { - "alias": "test-2", - "apiKey": ${coralogix_api_key}, - "isDefault": true, - "region": "US2" - } - ] - } - """) - content = template.substitute(coralogix_api_key=coralogix_api_key) - f.write_text(content) - cli(["integrations", "coralogix", "add-multiple", "-f", str(f)]) - cli(["integrations", "coralogix", "delete-all"]) - -@responses.activate -def test_integrations_coralogix_validate(tmp_path): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/validate/test", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", "coralogix", "validate", "-a", "test"]) - - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/validate", 
json=[ { 'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}], status=200) - cli(["integrations", "coralogix", "validate-all"]) diff --git a/tests.orig/test_integrations_github.py b/tests.orig/test_integrations_github.py deleted file mode 100644 index 3ce9976..0000000 --- a/tests.orig/test_integrations_github.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Tests for github integration commands. -""" -from cortexapps_cli.cortex import cli -import os -import pytest -import responses - -# Since responses are all mocked and no data validation is done by the CLI -- -# we let the API handle validation -- we don't need valid input files. -def _dummy_file(tmp_path): - f = tmp_path / "test.json" - f.write_text("foobar") - return f - -@responses.activate -def test_integrations_github_add_personal(capsys, tmp_path): - f = _dummy_file(tmp_path) - - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/personal", status=200) - cli(["integrations", "github", "add-personal", "-f", str(f)]) - -@responses.activate -def test_integrations_github_update_personal(capsys, tmp_path): - f = _dummy_file(tmp_path) - - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/personal/pt-001", status=200) - cli(["integrations", "github", "update-personal", "-a", "pt-001", "-f", str(f)]) - -@responses.activate -def test_integrations_github_get_personal(capsys, tmp_path): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/personal/pt-001", status=200) - cli(["integrations", "github", "get-personal", "-a", "pt-001"]) - -@responses.activate -def test_integrations_github_add(capsys, tmp_path): - f = _dummy_file(tmp_path) - - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/app", status=200) - cli(["integrations", "github", "add", "-f", str(f)]) - -@responses.activate -def test_integrations_github_get(capsys, tmp_path): - 
responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/app/pt-001", status=200) - cli(["integrations", "github", "get", "-a", "pt-001"]) - -@responses.activate -def test_integrations_github_get_all(capsys, tmp_path): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", status=200) - cli(["integrations", "github", "get-all"]) - -@responses.activate -def test_integrations_github_get_default(capsys, tmp_path): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/default-configuration", status=200) - cli(["integrations", "github", "get-default"]) - -@responses.activate -def test_integrations_github_validate(capsys, tmp_path): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/validate/pt-001", status=200) - cli(["integrations", "github", "validate", "-a", "pt-001"]) - -@responses.activate -def test_integrations_github_validate_all(capsys, tmp_path): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/validate", status=200) - cli(["integrations", "github", "validate-all"]) - -@responses.activate -def test_integrations_github_update(capsys, tmp_path): - f = _dummy_file(tmp_path) - - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/app/pt-001", status=200) - cli(["integrations", "github", "update", "-a", "pt-001", "-f", str(f)]) - -@responses.activate -def test_integrations_github_delete_all(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", status=200) - cli(["integrations", "github", "delete-all"]) diff --git a/tests.orig/test_integrations_github_update.json b/tests.orig/test_integrations_github_update.json deleted file mode 100644 index 4d85745..0000000 --- a/tests.orig/test_integrations_github_update.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "alias": "github-test-3", - "isDefault": false -} diff 
--git a/tests.orig/test_integrations_gitlab.py b/tests.orig/test_integrations_gitlab.py deleted file mode 100644 index e7809a9..0000000 --- a/tests.orig/test_integrations_gitlab.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Tests for gitlab integration commands. -""" -from cortexapps_cli.cortex import cli -import os -import sys -from string import Template - -def test_integrations_gitlab(tmp_path): - cli(["integrations", "gitlab", "delete-all"]) - - gitlab_personal_token = os.getenv('GITLAB_PERSONAL_TOKEN') - f = tmp_path / "test_integrations_gitlab_add.json" - template = Template("""{ - "alias": "cortex-test", - "groupNames": [ - ], - "hidePersonalProjects": false, - "isDefault": true, - "personalAccessToken": "${gitlab_personal_token}" - } - """) - content = template.substitute(gitlab_personal_token=gitlab_personal_token) - f.write_text(content) - cli(["integrations", "gitlab", "add", "-f", str(f)]) - - cli(["integrations", "gitlab", "get", "-a", "cortex-test"]) - - cli(["integrations", "gitlab", "get-all"]) - - cli(["integrations", "gitlab", "get-default"]) - - cli(["integrations", "gitlab", "validate", "-a", "cortex-test"]) - - cli(["integrations", "gitlab", "validate-all"]) - - cli(["integrations", "gitlab", "update", "-a", "cortex-test", "-f", "tests/test_integrations_gitlab_update.json"]) - - cli(["integrations", "gitlab", "add-multiple", "-f", "tests/test_integrations_gitlab_add_multiple.json"]) - - cli(["integrations", "gitlab", "delete", "-a", "cortex-test-2"]) - - cli(["integrations", "gitlab", "delete-all"]) diff --git a/tests.orig/test_integrations_gitlab_add_multiple.json b/tests.orig/test_integrations_gitlab_add_multiple.json deleted file mode 100644 index be6ed5c..0000000 --- a/tests.orig/test_integrations_gitlab_add_multiple.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "configurations": [ - { - "alias": "cortex-test-2", - "groupNames": [ - ], - "hidePersonalProjects": false, - "isDefault": false, - "personalAccessToken": "bogusToken1" - }, - { - "alias": 
"cortex-test-3", - "groupNames": [ - ], - "hidePersonalProjects": false, - "isDefault": false, - "personalAccessToken": "bogusToken2" - } - ] -} diff --git a/tests.orig/test_integrations_gitlab_update.json b/tests.orig/test_integrations_gitlab_update.json deleted file mode 100644 index 905a854..0000000 --- a/tests.orig/test_integrations_gitlab_update.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "alias": "cortex-test-1", - "groupNames": [ - ], - "hidePersonalProjects": false, - "host": "null", - "isDefault": true -} diff --git a/tests.orig/test_integrations_launchdarkly.py b/tests.orig/test_integrations_launchdarkly.py deleted file mode 100644 index 030672a..0000000 --- a/tests.orig/test_integrations_launchdarkly.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Tests for launchdarkly integration commands. -""" -from cortexapps_cli.cortex import cli -from string import Template -import json -import os -import pytest -import responses - -launchdarkly_api_key = json.dumps("fakeKey") - -def _launchdarkly_input(tmp_path): - f = tmp_path / "test_integrations_launchdarkly_add.json" - template = Template(""" - { - "alias": "test", - "apiKey": ${launchdarkly_api_key}, - "environment": "DEFAULT", - "isDefault": true - } - """) - content = template.substitute(launchdarkly_api_key=launchdarkly_api_key) - f.write_text(content) - return f - -def test_integrations_launchdarkly_add(tmp_path): - f = _launchdarkly_input(tmp_path) - - cli(["integrations", "launchdarkly", "delete-all"]) - cli(["integrations", "launchdarkly", "add", "-f", str(f)]) - cli(["integrations", "launchdarkly", "get", "-a", "test"]) - cli(["integrations", "launchdarkly", "get-all"]) - cli(["integrations", "launchdarkly", "get-default"]) - - cli(["integrations", "launchdarkly", "update", "-a", "test", "-f", str(f)]) - cli(["integrations", "launchdarkly", "delete", "-a", "test"]) - - f = tmp_path / "test_integrations_launchdarkly_update_multiple.json" - template = Template(""" - { - "configurations": [ - { - "alias": "test", - 
"apiKey": ${launchdarkly_api_key}, - "environment": "DEFAULT", - "isDefault": true - }, - { - "alias": "test-2", - "apiKey": ${launchdarkly_api_key}, - "environment": "FEDERAL", - "isDefault": false - } - ] - } - """) - content = template.substitute(launchdarkly_api_key=launchdarkly_api_key) - f.write_text(content) - cli(["integrations", "launchdarkly", "add-multiple", "-f", str(f)]) - cli(["integrations", "launchdarkly", "delete-all"]) - -@responses.activate -def test_integrations_launchdarkly_validate(tmp_path): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/validate/test", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", "launchdarkly", "validate", "-a", "test"]) - - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/validate", json=[ { 'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}], status=200) - cli(["integrations", "launchdarkly", "validate-all"]) diff --git a/tests.orig/test_integrations_pagerduty.py b/tests.orig/test_integrations_pagerduty.py deleted file mode 100644 index 334fe0b..0000000 --- a/tests.orig/test_integrations_pagerduty.py +++ /dev/null @@ -1,26 +0,0 @@ -""" -Tests for pagerduty integration commands. 
-""" -from cortexapps_cli.cortex import cli -from string import Template -import os - -pagerduty_token = os.getenv('PAGERDUTY_TOKEN') - -def test_integrations_pagerduty(tmp_path): - f = tmp_path / "test_integrations_pagerduty_add.json" - template = Template(""" - { - "isTokenReadonly": true, - "token": "${pagerduty_token}" - } - """) - content = template.substitute(pagerduty_token=pagerduty_token) - f.write_text(content) - - cli(["integrations", "pagerduty", "delete"]) - cli(["integrations", "pagerduty", "add", "-f", str(f)]) - cli(["integrations", "pagerduty", "get"]) - cli(["integrations", "pagerduty", "validate"]) - cli(["integrations", "pagerduty", "delete"]) - diff --git a/tests.orig/test_integrations_prometheus.py b/tests.orig/test_integrations_prometheus.py deleted file mode 100644 index b0c7730..0000000 --- a/tests.orig/test_integrations_prometheus.py +++ /dev/null @@ -1,72 +0,0 @@ -""" -Tests for prometheus integration commands. -""" -from cortexapps_cli.cortex import cli -from string import Template -import os - -prometheus_host = os.getenv('PROMETHEUS_HOST') -prometheus_password = os.getenv('PROMETHEUS_PASSWORD') -prometheus_user = os.getenv('PROMETHEUS_USER') - -def _prometheus_input(tmp_path): - f = tmp_path / "test_integrations_prometheus_add.json" - template = Template(""" - { - "alias": "cortex-test", - "host": "${prometheus_host}", - "isDefault": true, - "password": "${prometheus_password}", - "prometheusTenantId": "string", - "username": "${prometheus_user}" - } - """) - content = template.substitute(prometheus_host=prometheus_host, prometheus_password=prometheus_password, prometheus_user=prometheus_user) - f.write_text(content) - return f - - cli(["integrations", "prometheus", "delete-all"]) - - f = _prometheus_input(tmp_path) - cli(["integrations", "prometheus", "add", "-f", str(f)]) - - cli(["integrations", "prometheus", "get", "-a", "cortex-test"]) - - cli(["integrations", "prometheus", "get-all"]) - - cli(["integrations", "prometheus", 
"get-default"]) - - f = _prometheus_input(tmp_path) - cli(["integrations", "prometheus", "update", "-a", "cortex-test", "-f", str(f)]) - - cli(["integrations", "prometheus", "delete", "-a", "cortex-test"]) - - f = tmp_path / "test_integrations_prometheus_add_multiple.json" - template = Template(""" - { - "configurations": [ - { - "alias": "cortex-test-2", - "host": "${prometheus_host}", - "isDefault": false, - "password": "${prometheus_password}", - "prometheusTenantId": "string", - "username": "${prometheus_user}" - }, - { - "alias": "cortex-test-3", - "host": "${prometheus_host}", - "isDefault": false, - "password": "${prometheus_password}", - "prometheusTenantId": "string", - "username": "${prometheus_user}" - } - ] - } - """) - content = template.substitute(prometheus_host=prometheus_host, prometheus_password=prometheus_password, prometheus_user=prometheus_user) - f.write_text(content) - cli(["integrations", "prometheus", "add-multiple", "-f", str(f)]) - - cli(["integrations", "prometheus", "delete-all"]) - diff --git a/tests.orig/test_integrations_sonarqube.py b/tests.orig/test_integrations_sonarqube.py deleted file mode 100644 index b84c7d3..0000000 --- a/tests.orig/test_integrations_sonarqube.py +++ /dev/null @@ -1,74 +0,0 @@ -""" -Tests for sonarqube integration commands. 
-""" -from cortexapps_cli.cortex import cli -from string import Template -import json -import os -import pytest -import responses - -sonarqube_host = os.getenv('SONARQUBE_HOST') -sonarqube_personal_token = os.getenv('SONARQUBE_PERSONAL_TOKEN') - -def _sonarqube_input(tmp_path): - f = tmp_path / "test_integrations_sonarqube_add.json" - template = Template(""" - { - "alias": "cortex-test", - "host": "${sonarqube_host}", - "isDefault": true, - "token": "${sonarqube_personal_token}" - } - """) - content = template.substitute(sonarqube_host=sonarqube_host, sonarqube_personal_token=sonarqube_personal_token) - f.write_text(content) - return f - -def test_integrations_sonarqube(tmp_path): - cli(["integrations", "sonarqube", "delete-all"]) - - f = _sonarqube_input(tmp_path) - cli(["integrations", "sonarqube", "add", "-f", str(f)]) - - cli(["integrations", "sonarqube", "get", "-a", "cortex-test"]) - - cli(["integrations", "sonarqube", "get-all"]) - - cli(["integrations", "sonarqube", "get-default"]) - - f = _sonarqube_input(tmp_path) - cli(["integrations", "sonarqube", "update", "-a", "cortex-test", "-f", str(f)]) - - cli(["integrations", "sonarqube", "delete", "-a", "cortex-test"]) - - f = tmp_path / "test_integrations_sonarqube_add_multiple.json" - template = Template(""" - { - "configurations": [ - { - "alias": "cortex-test-2", - "host": "${sonarqube_host}", - "isDefault": true, - "token": "${sonarqube_personal_token}" - }, - { - "alias": "cortex-test-3", - "host": "${sonarqube_host}", - "isDefault": true, - "token": "${sonarqube_personal_token}" - } - ] - } - """) - content = template.substitute(sonarqube_host=sonarqube_host, sonarqube_personal_token=sonarqube_personal_token) - f.write_text(content) - cli(["integrations", "sonarqube", "add-multiple", "-f", str(f)]) - -@responses.activate -def test_integrations_sonarqube_validate(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/validate/cortex-test", json={'alias': 'test', 
'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", "sonarqube", "validate", "-a", "cortex-test"]) - - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/validate", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", "sonarqube", "validate-all"]) diff --git a/tests.orig/test_invalid-service.yaml b/tests.orig/test_invalid-service.yaml deleted file mode 100644 index ab87147..0000000 --- a/tests.orig/test_invalid-service.yaml +++ /dev/null @@ -1,8 +0,0 @@ -openapi: 3.0.1 -info: - title: Invalid Service to test dryrun - x-cortex-bag: invalid-service - x-cortex-type: service - x-cortex-owners: - - type: EMAIL - email: snoop.dogg@cortex.io diff --git a/tests.orig/test_ip_allowlist.py b/tests.orig/test_ip_allowlist.py deleted file mode 100644 index 72f910c..0000000 --- a/tests.orig/test_ip_allowlist.py +++ /dev/null @@ -1,44 +0,0 @@ -""" -Tests for ip-allowlist commands. 
-""" -from cortexapps_cli.cortex import cli -import requests -import pytest -from string import Template - -def _ip_allowlist_input(tmp_path): - ip_address = requests.get("https://ip.me").text.strip() - f = tmp_path / "test_ip_allowlist_input.json" - template = Template(""" - { - "entries": [ - { - "address": "${ip_address}", - "description": "string" - } - ] - } - """) - content = template.substitute(ip_address=ip_address) - f.write_text(content) - return f - -def test(capsys, tmp_path): - cli(["ip-allowlist", "get"]) - - f = _ip_allowlist_input(tmp_path) - cli(["ip-allowlist", "validate", "-f", str(f)]) - - f = _ip_allowlist_input(tmp_path) - cli(["ip-allowlist", "replace", "-f", str(f)]) - - cli(["ip-allowlist", "replace", "-f", "tests/test_ip_allowlist_empty.json"]) - - with pytest.raises(SystemExit) as excinfo: - cli(["ip-allowlist", "validate", "-f", "tests/test_ip_allowlist_invalid.json"]) - out, err = capsys.readouterr() - response = json.loads(out) - #print(err) - #assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" - assert out == "Bad Request" - assert excinfo.value.code == 400 diff --git a/tests.orig/test_ip_allowlist_empty.json b/tests.orig/test_ip_allowlist_empty.json deleted file mode 100644 index 3fde4b2..0000000 --- a/tests.orig/test_ip_allowlist_empty.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "entries": [ - ] -} diff --git a/tests.orig/test_ip_allowlist_invalid.json b/tests.orig/test_ip_allowlist_invalid.json deleted file mode 100644 index bc9c040..0000000 --- a/tests.orig/test_ip_allowlist_invalid.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "entries": [ - { - "addressList": "10.0.0.1", - "description": "string" - } - ] - } diff --git a/tests.orig/test_packages.py b/tests.orig/test_packages.py deleted file mode 100644 index 3a50533..0000000 --- a/tests.orig/test_packages.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -Tests for packages commands. 
-""" -from cortexapps_cli.cortex import cli - -def test_packages(): - cli(["packages", "go", "upload", "-t", "test-service", "-f", "tests/test_packages_go.sum"]) - - cli(["packages", "java", "upload-single", "-t", "test-service", "-f", "tests/test_packages_java_single.json"]) - - cli(["packages", "java", "upload-multiple", "-t", "test-service", "-f", "tests/test_packages_java_multiple.json"]) - - cli(["packages", "python", "upload-pipfile", "-t", "test-service", "-f", "tests/test_packages_python_pipfile.lock"]) - - cli(["packages", "python", "upload-requirements", "-t", "test-service", "-f", "tests/test_packages_python_requirements.txt"]) - - cli(["packages", "node", "upload-package", "-t", "test-service", "-f", "tests/test_packages_node_package.json"]) - - cli(["packages", "node", "upload-package-lock", "-t", "test-service", "-f", "tests/test_packages_node_package_lock.json"]) - - cli(["packages", "node", "upload-yarn-lock", "-t", "test-service", "-f", "tests/test_packages_node_yarn.lock"]) - - cli(["packages", "list", "-t", "test-service"]) - - cli(["packages", "java", "delete", "-t", "test-service", "-n", "io.cortex.teams"]) - - cli(["packages", "python", "delete", "-t", "test-service", "-n", "cycler"]) - - cli(["packages", "node", "delete", "-t", "test-service", "-n", "inter-angular"]) - - cli(["packages", "list", "-t", "test-service"]) - - cli(["packages", "nuget", "upload-packages-lock", "-t", "test-service", "-f", "tests/test_packages_nuget_packages_lock.json"]) - - cli(["packages", "nuget", "upload-csproj", "-t", "test-service", "-f", "tests/test_packages_nuget.csproj"]) diff --git a/tests.orig/test_plugins.json b/tests.orig/test_plugins.json deleted file mode 100644 index 3150e65..0000000 --- a/tests.orig/test_plugins.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "blob": "
", - "contexts": [ - { - "type": "GLOBAL" - }, - { - "entityFilter": { - "type": "SERVICE_FILTER" - }, - "type": "ENTITY" - } - ], - "description": "Just testin' plugin uploads", - "isDraft": false, - "minimumRoleRequired": "VIEWER", - "name": "My Test Plugin", - "tag": "my-test-plugin" -} - diff --git a/tests.orig/test_plugins.py b/tests.orig/test_plugins.py deleted file mode 100644 index 53934b0..0000000 --- a/tests.orig/test_plugins.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -Tests for plugins commands. -""" -from cortexapps_cli.cortex import cli -import pytest - -@pytest.mark.skip(reason="Needs fix for CET-8598") -def test(capsys): - cli(["plugins", "get"]) - out, err = capsys.readouterr() - if (str(out).find('{"tag":"my-test-plugin"') != -1): - cli(["plugins", "delete", "-t", "my-test-plugin"]) - cli(["plugins", "create", "-f", "tests/test_plugins.json"]) - - cli(["plugins", "get"]) - - cli(["plugins", "update", "-t", "my-test-plugin", "-f", "tests/test_plugins_update.json"]) - - cli(["plugins", "get-by-tag", "-t", "my-test-plugin"]) - - cli(["plugins", "delete", "-t", "my-test-plugin"]) diff --git a/tests.orig/test_plugins_update.json b/tests.orig/test_plugins_update.json deleted file mode 100644 index 2bbb1b5..0000000 --- a/tests.orig/test_plugins_update.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "blob": "
", - "contexts": [ - { - "type": "GLOBAL" - }, - { - "entityFilter": { - "type": "SERVICE_FILTER" - }, - "type": "ENTITY" - } - ], - "description": "Just testin' plugin uploads", - "isDraft": false, - "minimumRoleRequired": "VIEWER", - "name": "My Test Plugin" -} diff --git a/tests.orig/test_queries.json b/tests.orig/test_queries.json deleted file mode 100644 index 55882fc..0000000 --- a/tests.orig/test_queries.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "query": "tag = \"cli-test-service\" and custom(\"testField\") != null" -} diff --git a/tests.orig/test_queries.py b/tests.orig/test_queries.py deleted file mode 100644 index 0e39781..0000000 --- a/tests.orig/test_queries.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Tests for queries commands. -""" -from cortexapps_cli.cortex import cli -from datetime import datetime -from datetime import timedelta -import json -import pytest -from string import Template -import sys -import subprocess - -@pytest.mark.skip(reason="Does not reliably complete within 5 minutes; will check with engineering for suggestions.") -def test_queries_run_json(tmp_path): - today = datetime.now().strftime("%m-%d-%Y-%H-%M-%S") - - f = tmp_path / "cql.json" - template = Template("""{ - "query": "tag = \\"cli-test-service\\" and custom(\\"today\\") = \\"${today}\\"" - }""") - content = template.substitute(today=today) - f.write_text(content) - - f1 = tmp_path / "custom-data-query-1.json" - template = Template(""" - { - "key": "today", - "value": "${today}" - } - """) - custom_content = template.substitute(today=today) - f1.write_text(custom_content) - - cli(["custom-data", "add", "-t", "cli-test-service", "-f", str(f1)]) - cli(["-d", "queries", "run", "-w", "-x", "300", "-f", str(f)]) - -@pytest.mark.skip(reason="Does not reliably complete within 5 minutes; will check with engineering for suggestions.") -def test_queries_run_text(tmp_path): - today = datetime.now() - yesterday = today - timedelta(days = 1) - yesterday = 
yesterday.strftime("%m-%d-%Y-%H-%M-%S") - - f = tmp_path / "cql.txt" - template = Template(""" - tag = "cli-test-service" and custom("yesterday") = "${yesterday}" - """) - content = template.substitute(yesterday=yesterday) - f.write_text(content) - - f1 = tmp_path / "custom-data-query-2.json" - template = Template(""" - { - "key": "yesterday", - "value": "${yesterday}" - } - """) - content = template.substitute(yesterday=yesterday) - f1.write_text(content) - - cli(["custom-data", "add", "-t", "cli-test-service", "-f", str(f1)]) - cli(["queries", "run", "-w", "-x", "300", "-f", str(f)]) - -# Verify timeout handling. If CQL query completes in 2 seconds, this test -# could fail. Could probably put in try/catch stanza. -def test_queries_run_timeout(): - with pytest.raises(SystemExit) as excinfo: - cli(["queries", "run", "-w", "-x", "2", "-f", "tests/test_queries.txt"]) diff --git a/tests.orig/test_resource_definitions.py b/tests.orig/test_resource_definitions.py deleted file mode 100644 index a659c29..0000000 --- a/tests.orig/test_resource_definitions.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -Tests for resource-definitions commands. -""" -from cortexapps_cli.cortex import cli -import json -import sys - -def test_resource_definitions(capsys): - # Delete resource definition if it already exists. - cli(["resource-definitions", "list"]) - out, err = capsys.readouterr() - # Maybe a cleaner way to do this with json object? 
- if (str(out).find('{"type":"test-resource-definition"') != -1): - cli(["resource-definitions", "delete", "-t", "test-resource-definition"]) - cli(["resource-definitions", "create", "-f", "tests/test-resource-definition.json"]) - - cli(["resource-definitions", "list"]) - - cli(["resource-definitions", "get", "-t", "test-resource-definition"]) - - cli(["resource-definitions", "update", "-t", "test-resource-definition", "-f", "tests/test-resource-definition-update.json"]) diff --git a/tests.orig/test_scorecards.py b/tests.orig/test_scorecards.py deleted file mode 100644 index d7445e8..0000000 --- a/tests.orig/test_scorecards.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -Tests for scorecards commands. -""" -from cortexapps_cli.cortex import cli -import json - -def test_scorecards(): - cli(["scorecards", "create", "-f", "tests/test_scorecards.yaml"]) - - cli(["scorecards", "list"]) - - cli(["scorecards", "shield", "-s", "test-scorecard", "-t", "test-service"]) - - cli(["scorecards", "get", "-t", "test-scorecard"]) - - cli(["scorecards", "descriptor", "-t", "test-scorecard"]) - - cli(["scorecards", "next-steps", "-t", "test-scorecard", "-e", "test-service"]) - - # Not sure if we can run this cli right away. Newly-created Scorecard might not be evaluated yet. - # 2024-05-06, additionally now blocked by CET-8882 - # cli(["scorecards", "scores", "-t", "test-scorecard", "-e", "test-service"]) - - cli(["scorecards", "scores", "-t", "test-scorecard"]) - -def test_scorecards_drafts(capsys): - cli(["scorecards", "create", "-f", "tests/test_scorecards_draft.yaml"]) - # Only capturing this so it doesn't show up in next call to capsys. 
- out, err = capsys.readouterr() - - cli(["scorecards", "list", "-s"]) - out, err = capsys.readouterr() - - out = json.loads(out) - assert any(scorecard['tag'] == 'test-scorecard-draft' for scorecard in out['scorecards']) diff --git a/tests.orig/test_stdin.py b/tests.orig/test_stdin.py deleted file mode 100644 index 2ac45fc..0000000 --- a/tests.orig/test_stdin.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -Tests for stdin input. -""" -import subprocess - -def test_stdin_input(capsys): - cat_process = subprocess.Popen(['cat', 'tests/test_catalog_create_service.yaml'], stdout=subprocess.PIPE) - cortex_process = subprocess.Popen(['cortexapps_cli/cortex.py', 'catalog', 'create','-f-'],stdin=cat_process.stdout, stdout=subprocess.PIPE) - out, err = cortex_process.communicate() - rc=cortex_process.wait() - assert rc == 0, "catalog test with stdin should succeed" diff --git a/tests.orig/test_teams.py b/tests.orig/test_teams.py deleted file mode 100644 index dafe20e..0000000 --- a/tests.orig/test_teams.py +++ /dev/null @@ -1,49 +0,0 @@ -""" -Tests for teams commands. -""" -from cortexapps_cli.cortex import cli - -import json -import sys - -# Deleted this test for several reasons: -# -# 1. It's failing in release.com environments. -# 2. There appears to be a bug where teams created using the teams API are not immediately -# avaialble from teams list API. -# 3. There are plans to deprecate the teams API and manage everything with the catalog API. -# 4. You can create Cortex-managed teams with the catalog API. 
-# -# def test_teams_create(capsys): -# cli(["teams", "list"]) -# out, err = capsys.readouterr() -# json_data = json.loads(out) -# -# cli(["catalog", "list"]) -# out, err = capsys.readouterr() -# catalog_json_data = json.loads(out) -# -# sys.stdout.write(str(json_data)) -# -# if any(team['teamTag'] == 'cli-test-team' for team in json_data['teams']): -# sys.stdout.write("deleting cli-test-team") -# cli(["teams", "delete", "-t", "cli-test-team"]) -# -# if any(entity['tag'] == 'cli-test-team' for entity in catalog_json_data['entities']): -# sys.stdout.write("deleting catalog cli-test-team") -# cli(["catalog", "delete", "-t", "cli-test-team"]) -# -# cli(["-d", "teams", "create", "-f", "tests/test_teams.yaml"]) - -def test_teams_get(): - cli(["teams", "get", "-t", "test-team-1"]) - -def test_teams_list(): - cli(["teams", "list"]) - -def test_teams_archive(): - cli(["teams", "archive", "-t", "test-team-1"]) - cli(["teams", "unarchive", "-t", "test-team-1"]) - -def test_teams_update_metadata(): - cli(["teams", "update-metadata", "-t", "test-team-2", "-f", "tests/test_teams_update.json"]) diff --git a/tests.orig/test_teams.yaml b/tests.orig/test_teams.yaml deleted file mode 100644 index 29924a4..0000000 --- a/tests.orig/test_teams.yaml +++ /dev/null @@ -1,42 +0,0 @@ -{ - "teamTag": "cli-test-team", - "metadata": { - "name": "Stanford", - "description": "Stanford Cardinal 1998 Final Four Team", - "summary": null - }, - "links": [], - "slackChannels": [], - "additionalMembers": [ - { - "name": "Mark Madsen", - "email": "mark.madsen@cortex.io", - "description": "forward" - }, - { - "name": "Kris Weems", - "email": "kris.weems@cortex.io", - "description": "shooting guard" - }, - { - "name": "Tim Young", - "email": "tim.young@cortex.io", - "description": "center" - }, - { - "name": "Peter Sauer", - "email": "peter.sauer@cortex.io", - "description": "forward" - }, - { - "name": "Arthur Lee", - "email": "arthur.lee@cortex.io", - "description": "point guard" - } - ], - 
"isArchived": false, - "cortexTeam": { - "members": [] - }, - "type": "CORTEX" -} diff --git a/tests.orig/test_teams_update.json b/tests.orig/test_teams_update.json deleted file mode 100644 index aa5b528..0000000 --- a/tests.orig/test_teams_update.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "metadata": { - "name": "Stanford", - "description": "Stanford Cardinal 2023 Final Four Team", - "summary": null - }, - "links": [], - "slackChannels": [], - "additionalMembers": [ - { - "name": "Steph Curry", - "email": "steph.curry@cortex.io", - "description": "guard" - } - ], - "cortexTeam":{ - "members": [] - }, - "type": "CORTEX" -} diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/common.py b/tests/common.py deleted file mode 100644 index c6f2b4a..0000000 --- a/tests/common.py +++ /dev/null @@ -1,62 +0,0 @@ -from cortex import cli -#from cortexapps_cli.cortex import cli - -from contextlib import redirect_stdout -from datetime import datetime -from datetime import timedelta -from datetime import timezone -from github import Auth -from github import Github -from string import Template -from types import SimpleNamespace -from unittest import mock -import io -import json -import os -import pytest -import random -import re -import requests -import sys -import tempfile -import textwrap -import time -import yaml -from feature_flag_check import * - -def cli_command(capsys, args, output_type="json"): - args = ["-q"] + args - - try: - cli(args) - except: - captured = capsys.readouterr() - print("cli_command: error: " + captured.err) - - out, err = capsys.readouterr() - - if output_type == "json": - return json.loads(out) - elif output_type == "text": - return out - -def today(): - return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S") - -def yesterday(): - today = datetime.now(timezone.utc) - yesterday = today - timedelta(days = 1) - return yesterday.strftime("%Y-%m-%dT%H:%M:%S") - -def packages(capsys, 
packageCommand, packageType, version, name, tag): - response = cli_command(capsys, ["packages", "list", "-t", tag]) - assert any(package['packageType'] == packageType and - package['version'] == version and - package['name'] == name - for package in response), "Should find " + packageType + " package with name " + name + " and version " + version + " for entity " + tag - - cli(["packages", packageCommand, "delete", "-t", tag, "-n", name]) - response = cli_command(capsys, ["packages", "list", "-t", tag]) - assert not any(package['packageType'] == packageType and - package['name'] == name - for package in response), "Should not find " + packageType + " package with name " + name diff --git a/tests/cortex_github.py b/tests/cortex_github.py deleted file mode 100644 index 95cb701..0000000 --- a/tests/cortex_github.py +++ /dev/null @@ -1,130 +0,0 @@ -from common import * - -class CortexGithub: - def __init__(self, - url=os.getenv('GH_URL'), - org=os.getenv('GH_ORG'), - repo_name=os.getenv('GH_REPO'), - cortex_alias=os.getenv('CORTEX_GH_ALIAS'), - webhook_url=os.getenv('CORTEX_GH_WEBHOOK_URL')): - - auth = Auth.Token(os.getenv('GH_PAT')) - g = Github(base_url=url, auth=auth) - - organization = g.get_organization(org) - if not any(repo.name == repo_name for repo in organization.get_repos()): - organization.create_repo(repo_name, private=True) - - repo = organization.get_repo(repo_name) - - self.org = org - self.alias = cortex_alias - self.repo = repo - self.webhook_url = webhook_url - - def delete_personal_configuration(self): - output = io.StringIO() - with redirect_stdout(output): - cli(["-q", "integrations", "github", "get-all"]) - response = json.loads(output.getvalue()) - if any(configuration['alias'] == self.alias for configuration in response['configurations']): - cli(["-q", "integrations", "github", "delete-personal", "-a", self.alias]) - - def create_integration(self): - fd, path = tempfile.mkstemp() - template = Template(""" - { - "accessToken": "${gh_pat}", - 
"alias": "${cortex_gh_alias}", - "isDefault": false - } - """) - content = template.substitute(gh_pat=os.getenv('GH_PAT'), cortex_gh_alias=self.alias) - with open(path, 'w') as f: - f.write(content) - - os.close(fd) - self.delete_personal_configuration() - cli(["-q", "integrations", "github", "add-personal", "-f", path]) - - def create_webhook(self): - EVENTS = ["push", "pull_request"] - - config = { - "url": self.webhook_url, - "secret": os.getenv('GH_WEBHOOK_SECRET'), - "content_type": "json" - } - - for hook in self.repo.get_hooks(): - if hook.config['url'] == self.webhook_url: - hook.delete() - - self.repo.create_hook("web", config, EVENTS, active=True) - - - def read_entity_template(self, file): - with open (file, 'r') as f: - template = Template(f.read()) - return textwrap.dedent(template.substitute(environment=os.getenv('CORTEX_ENV'), tenant=os.getenv('CORTEX_TENANT'), today=today(), org=self.org, repo=self.repo.name, alias=self.alias)) - - - # Wait max_attempts * sleep_interval for git commit to appear in gitops-logs - # Will wait for up to 10 minutes for commit to be processed. - # TODO: find out how we can optimize, or at least understand, the processing time. - def check_gitops_logs(self, capsys, sha): - found = False - #max_attempts = 120 - max_attempts = 30 - sleep_interval = 5 - for attempt in range(1, max_attempts): - response = cli_command(capsys, ["gitops-logs", "get", "-p", "0", "-z", "25"]) - if any(log['commit'] == sha for log in response['logs']): - found = True - break - else: - if attempt == max_attempts: - break - time.sleep(sleep_interval) - - return found - - def commit_cortex_entity(self, repo, content, branch, path): - contents = repo.get_contents("") - - found = False - while contents: - file_content = contents.pop(0) - if file_content.path == path: - found = True - break - if file_content.type == "dir": - contents.extend(repo.get_contents(file_content.path)) - - commit_message = "commit on " + today() + "." 
- - # https://github.com/PyGithub/PyGithub/issues/1787 - # Seeing some 409 errors with this. Might need a sleep here? Doesn't seem like a great solution. - # Maybe the python implementation gets confused when multiple invocations run in parallel, as happens - # with the pytests running in parallel and the API is called at the same time? - time.sleep(random.randint(1, 10)) - if found: - contents = repo.get_contents(path, ref=branch) - c = repo.update_file(path, commit_message, content, contents.sha, branch=branch) - else: - # TODO - how to create initial file in repo? - c = repo.create_file(path, commit_message, content, branch=branch) - - return c['commit'].sha - - -def gitops_add(capsys, template, path): - g = CortexGithub() - content = g.read_entity_template(template) - sha = g.commit_cortex_entity(g.repo, content, g.repo.default_branch, path) - return g.check_gitops_logs(capsys, sha) - -def github_setup(): - g = CortexGithub() - g.create_webhook() - g.create_integration() diff --git a/tests/feature_flag_check.py b/tests/feature_flag_check.py deleted file mode 100644 index dd66b59..0000000 --- a/tests/feature_flag_check.py +++ /dev/null @@ -1,34 +0,0 @@ -import sys -import os -import json -import traceback - -# QUESTION: should we introduce a flag that ignores whether or not the flag is set? -# In other words, do we want to ensure a certain set of account flags is set and, if not, -# cause the test to fail? - -def read_file(): - feature_flag_file = os.getenv('FEATURE_FLAG_EXPORT') - f = open(feature_flag_file) - data = json.load(f) - f.close() - return data - -def check_boolean_flag(): - data = read_file() - # This little nugget returns the calling function. The calling function - # should map to a lowercase value of the boolean-controlled account flag. 
- calling_function = traceback.extract_stack(None, 2)[0][2] - flag = calling_function.upper() - return any(f['flag'] == flag and f['value'] == True for f in data) - -def enable_cql_v2(): - return check_boolean_flag() - -def allow_team_entities_in_catalog_api(): - return check_boolean_flag() - -def enable_ui_editing(entity_type): - data = read_file() - return any(f['flag'] == "ENABLE_ENTITY_UI_EDITING" and f['value'][entity_type] == True for f in data) - #return data['ENABLE_UI_EDITING'][entity_type] == true diff --git a/tests/feature_flag_dump.py b/tests/feature_flag_dump.py deleted file mode 100644 index 1a878e9..0000000 --- a/tests/feature_flag_dump.py +++ /dev/null @@ -1,23 +0,0 @@ -import os -import requests -import sys - -file = sys.argv[1] - -h = { - "Authorization": "Bearer " + os.getenv('CORTEX_API_KEY') -} - -url = os.getenv('CORTEX_BASE_URL') + "/api/internal/v1/cortex/preferences" - -try: - r = requests.get(url, headers=h) - r.raise_for_status() -except requests.exceptions.RequestException as e: - print(e.response.text) - sys.exit(1) - -print("feature flags = " + r.text) -f = open(file, "w") -f.write(r.text) -f.close() diff --git a/tests/github_setup.py b/tests/github_setup.py deleted file mode 100644 index 1ba5a36..0000000 --- a/tests/github_setup.py +++ /dev/null @@ -1,3 +0,0 @@ -from cortex_github import * - -github_setup() diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py new file mode 100644 index 0000000..061da45 --- /dev/null +++ b/tests/helpers/utils.py @@ -0,0 +1,44 @@ +from cortexapps_cli.cli import app +from datetime import datetime +from datetime import timedelta +from datetime import timezone +from enum import Enum +import json +import os +import pytest +from typer.testing import CliRunner +from unittest import mock +import responses +import yaml + +runner = CliRunner() + +class ReturnType(str, Enum): + JSON = "JSON" + RAW = "RAW" + STDOUT = "STDOUT" + +def today(): + return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S") 
+ +def yesterday(): + today = datetime.now(timezone.utc) + yesterday = today - timedelta(days = 1) + return yesterday.strftime("%Y-%m-%dT%H:%M:%S") + +def cli(params, return_type=ReturnType.JSON): + if not isinstance(return_type, ReturnType): + raise TypeError('return_type must be an instance of ReturnType Enum') + + result = runner.invoke(app, params) + + match return_type: + case ReturnType.JSON: + if result.stdout == "": + return json.loads('{}') + else: + return json.loads(result.stdout) + case ReturnType.RAW: + return result + case ReturnType.STDOUT: + return result.stdout diff --git a/tests/plugins.json b/tests/plugins.json new file mode 100644 index 0000000..7996fcd --- /dev/null +++ b/tests/plugins.json @@ -0,0 +1,298 @@ +{ + "total": 10, + "page": 0, + "totalPages": 1, + "plugins": [ + { + "tag": "github-deploys", + "name": "GitHub Releases", + "description": null, + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": true, + "types": [ + "service" + ] + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": { + "id": 2800, + "tenantId": 1, + "oauthUserId": "google-oauth2|105763048987093518818", + "name": "David Barnes", + "email": "david.barnes@cortex.io", + "dateCreated": "2022-05-16T16:38:00.288436", + "lastLogin": null + }, + "proxyTag": "github-releases", + "lastUpdated": "2023-06-08T16:17:56.433869" + }, + { + "tag": "techradar", + "name": "TechRadar", + "description": null, + "contexts": [ + { + "type": "GLOBAL" + } + ], + "minimumRoleRequired": "USER", + "isDraft": false, + "createdBy": { + "id": 2351, + "tenantId": 1, + "oauthUserId": "google-oauth2|100446023106570979548", + "name": "Mike Moore", + "email": "mike.moore@cortex.io", + "dateCreated": "2022-03-21T00:08:56.574619", + "lastLogin": null + }, + "proxyTag": null, + "lastUpdated": "2023-10-08T20:11:16.702748" + }, + { + "tag": "cli-installation-and-configuration", + "name": "Installation and 
Configuration", + "description": "Demonstrates how to install the Cortex Command Line Interface (CLI) from MacOS homebrew.", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": true, + "types": [ + "service" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'cli'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2024-06-05T20:18:41.825891" + }, + { + "tag": "project-workflows-create-jira-ticket", + "name": "Create Jira Ticket", + "description": "Showcases creating a Jira ticket using Cortex Workflows. Please don't share externally.", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + "domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'project-workflows'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2024-06-08T17:28:16.397023" + }, + { + "tag": "xero-go-live-6-18", + "name": "💪 Xero Go-Live 6-18", + "description": null, + "contexts": [ + { + "type": "GLOBAL" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": { + "id": 9320, + "tenantId": 1, + "oauthUserId": "google-oauth2|115853682294872999618", + "name": "Jeff Schnitter", + "email": "jeff.schnitter@cortex.io", + "dateCreated": "2023-07-24T15:08:04.185523", + "lastLogin": "2025-01-08T23:39:41.901175" + }, + "proxyTag": null, + "lastUpdated": "2024-06-14T22:08:13.791927" + }, + { + "tag": "jeff-schnitter-i-melt-with-you", + "name": "Favorite Song", + "description": "I Melt With You", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + 
"domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'jeff-schnitter'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2024-09-25T17:00:33.607194" + }, + { + "tag": "hanna-vigil-the-black-dog", + "name": "Favorite Song", + "description": "The Black Dog", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + "domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'hanna-vigil'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2024-09-30T17:22:20.402398" + }, + { + "tag": "plugin-marketplace", + "name": "Plugin Marketplace", + "description": null, + "contexts": [ + { + "type": "GLOBAL" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": { + "id": 25205, + "tenantId": 1, + "oauthUserId": "google-oauth2|110738514566936908176", + "name": "Martin Stone", + "email": "martin.stone@cortex.io", + "dateCreated": "2024-08-05T16:41:18.839233", + "lastLogin": "2025-01-06T17:33:21.527241" + }, + "proxyTag": null, + "lastUpdated": "2024-11-27T23:13:49.785416" + }, + { + "tag": "aaron-wirick-surf-wax-america", + "name": "Favorite Song", + "description": "Surf Wax America", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + "domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'aaron-wirick'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": 
null, + "proxyTag": null, + "lastUpdated": "2024-12-06T16:45:45.378944" + }, + { + "tag": "doug-cooper-jealous-(labrinth)", + "name": "Favorite Song", + "description": "Jealous (labrinth)", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + "domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'doug-cooper'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2025-01-08T19:59:46.187193" + } + ] +} diff --git a/tests.orig/test-groups.json b/tests/test-groups.json similarity index 100% rename from tests.orig/test-groups.json rename to tests/test-groups.json diff --git a/tests/test_api_keys.py b/tests/test_api_keys.py new file mode 100644 index 0000000..ae58fc2 --- /dev/null +++ b/tests/test_api_keys.py @@ -0,0 +1,13 @@ +from tests.helpers.utils import * + +def test(): + cli(["api-keys", "create", "-d", "Key created from CLI test", "-n", "CLI Test Key", "-dr", "USER"]) + + response = cli(["api-keys", "list"]) + assert any(key['description'] == 'Key created from CLI test' for key in response['apiKeys']), "Should find key with description 'Key created from CLI test'" + + cid = [key['cid'] for key in response['apiKeys'] if key['description'] == 'Key created from CLI test'][0] + print("cid = " + cid) + response = cli(["api-keys", "get", "-c", cid]) + cli(["api-keys", "update", "-c", cid, "-n", "My new name", "-d", "Update: Key created from CLI test"]) + cli(["api-keys", "delete", "-c", cid]) diff --git a/tests/test_audit_logs.py b/tests/test_audit_logs.py index 1351a5f..45722d1 100644 --- a/tests/test_audit_logs.py +++ b/tests/test_audit_logs.py @@ -1,6 +1,6 @@ -from common import * +from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") -def test(capsys): - 
response = cli_command(capsys, ["audit-logs", "get",]) - assert (len(response['logs']) > 0) +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +def test(): + result = cli(["audit-logs", "get"]) + assert (len(result['logs']) > 0) \ No newline at end of file diff --git a/tests/test_audit_logs_dates.py b/tests/test_audit_logs_dates.py index c21c829..d3a5301 100644 --- a/tests/test_audit_logs_dates.py +++ b/tests/test_audit_logs_dates.py @@ -1,8 +1,8 @@ -from common import * +from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") -def test(capsys): - start_date = yesterday() +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +def test(): end_date = today() - response = cli_command(capsys, ["audit-logs", "get", "-s", start_date, "-e", end_date]) - assert (len(response['logs']) > 0) + start_date = yesterday() + result = cli(["audit-logs", "get", "-s", start_date, "-e", end_date]) + assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_end_date.py b/tests/test_audit_logs_end_date.py index 376a471..d1c6f9d 100644 --- a/tests/test_audit_logs_end_date.py +++ b/tests/test_audit_logs_end_date.py @@ -1,7 +1,7 @@ -from common import * +from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") -def test(capsys): +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +def test(): end_date = today() - response = cli_command(capsys, ["audit-logs", "get", "-e", end_date]) - assert (len(response['logs']) > 0) + result = cli(["audit-logs", "get", "-e", end_date]) + assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_page.py b/tests/test_audit_logs_page.py index fa7b67a..98d8169 100644 --- a/tests/test_audit_logs_page.py +++ b/tests/test_audit_logs_page.py @@ -1,6 +1,6 @@ -from common import * +from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") -def test(capsys): - response 
= cli_command(capsys, ["audit-logs", "get", "-p", "0",]) - assert (len(response['logs']) > 0) +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +def test(): + result = cli(["audit-logs", "get", "-p", "0"]) + assert (len(result['logs']) > 0) \ No newline at end of file diff --git a/tests/test_audit_logs_size.py b/tests/test_audit_logs_size.py index 91fbfe8..78f846a 100644 --- a/tests/test_audit_logs_size.py +++ b/tests/test_audit_logs_size.py @@ -1,6 +1,6 @@ -from common import * +from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") -def test(capsys): - response = cli_command(capsys, ["audit-logs", "get", "-z", "1"]) - assert (len(response['logs']) == 1) +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +def test(): + result = cli(["audit-logs", "get", "-p", "0", "-z", "1"]) + assert (len(result['logs']) == 1) \ No newline at end of file diff --git a/tests/test_audit_logs_start_date.py b/tests/test_audit_logs_start_date.py index d20cc97..71c6bde 100644 --- a/tests/test_audit_logs_start_date.py +++ b/tests/test_audit_logs_start_date.py @@ -1,7 +1,7 @@ -from common import * +from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") -def test(capsys): +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +def test(): start_date = yesterday() - response = cli_command(capsys, ["audit-logs", "get", "-s", start_date]) - assert (len(response['logs']) > 0) + result = cli(["audit-logs", "get", "-s", start_date]) + assert (len(result['logs']) > 0) \ No newline at end of file diff --git a/tests/test_catalog_archive_entity.py b/tests/test_catalog_archive_entity.py index e3e23be..c6aa7da 100644 --- a/tests/test_catalog_archive_entity.py +++ b/tests/test_catalog_archive_entity.py @@ -1,10 +1,7 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli(["-q", "catalog", "create", "-f", 
"data/run-time/archive-entity.yaml"]) - cli(["-q", "catalog", "archive", "-t", "archive-entity"]) - # Need to clear captured system output from the above commands to clear the way for the next one. - capsys.readouterr() +def test(): + cli(["catalog", "archive", "-t", "archive-entity"]) - response = cli_command(capsys, ["catalog", "details", "-t", "archive-entity"]) + response = cli(["catalog", "details", "-t", "archive-entity"]) assert response['isArchived'] == True, "isArchived attribute should be true" diff --git a/tests/test_catalog_create_entity.py b/tests/test_catalog_create_entity.py index 079afb2..fd2f8dd 100644 --- a/tests/test_catalog_create_entity.py +++ b/tests/test_catalog_create_entity.py @@ -1,9 +1,7 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli(["-q", "catalog", "create", "-f", "data/run-time/create-entity.yaml"]) - # Need to clear captured system output from the above commands to clear the way for the next one. - capsys.readouterr() +def test(): + cli(["catalog", "create", "-f", "data/import/catalog/cli-test-service.yaml"]) - response = cli_command(capsys, ["catalog", "descriptor", "-t", "create-entity"]) - assert response['info']['x-cortex-tag'] == "create-entity" + response = cli(["catalog", "descriptor", "-t", "cli-test-service"]) + assert response['info']['x-cortex-tag'] == "cli-test-service" diff --git a/tests/test_catalog_create_entity_viewer.py b/tests/test_catalog_create_entity_viewer.py index 73093a9..70c4662 100644 --- a/tests/test_catalog_create_entity_viewer.py +++ b/tests/test_catalog_create_entity_viewer.py @@ -1,11 +1,8 @@ -from common import * +from tests.helpers.utils import * # Using a key with viewer role should be Forbidden. 
@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) def test(capsys): - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "catalog", "create", "-f", "data/run-time/create-entity.yaml"]) - out, err = capsys.readouterr() + response = cli(["catalog", "create", "-f", "data/import/catalog/cli-test-create-entity.yaml"], ReturnType.RAW) - assert out == "Forbidden" - assert excinfo.value.code == 403 + assert "HTTP Error 403:" in response.stdout, "command fails with 403 error" diff --git a/tests/test_catalog_delete_entity.py b/tests/test_catalog_delete_entity.py index d73128e..8944d07 100644 --- a/tests/test_catalog_delete_entity.py +++ b/tests/test_catalog_delete_entity.py @@ -1,16 +1,12 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli_command(capsys, ["catalog", "create", "-f", "data/run-time/delete-entity.yaml"]) - response = cli_command(capsys, ["catalog", "details", "-t", "delete-entity"]) - assert response['tag'] == 'delete-entity', "Should find newly created entity" +def test(): + response = cli(["catalog", "details", "-t", "cli-test-delete-entity"]) + assert response['tag'] == 'cli-test-delete-entity', "Should find newly created entity" - cli(["-q", "catalog", "delete", "-t", "delete-entity"]) + cli(["catalog", "delete", "-t", "cli-test-delete-entity"]) # Since entity is deleted, cli command should exit with a Not Found, 404 error. 
- with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "details", "-t", "delete-entity"]) - out, err = capsys.readouterr() + response = cli(["catalog", "details", "-t", "cli-test-delete-entity"], ReturnType.RAW) - assert out == "Not Found" - assert excinfo.value.code == 404 + assert "HTTP Error 404:" in response.stdout, "command fails with 404 error" diff --git a/tests/test_catalog_dryrun.py b/tests/test_catalog_dryrun.py deleted file mode 100644 index 11020ab..0000000 --- a/tests/test_catalog_dryrun.py +++ /dev/null @@ -1,14 +0,0 @@ -from common import * - -def test(capsys): - cli(["-q", "catalog", "create", "-f", "data/run-time/create-dryrun.yaml", "--dry-run"]) - # Need to clear captured system output from the above commands to clear the way for the next one. - capsys.readouterr() - - # Entity should not exist. - with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "descriptor", "-t", "create-entity-dryrun"]) - out, err = capsys.readouterr() - - assert out == "Not Found" - assert excinfo.value.code == 404 diff --git a/tests/test_catalog_get_entity_details.py b/tests/test_catalog_get_entity_details.py index d4ad5f1..0d261de 100644 --- a/tests/test_catalog_get_entity_details.py +++ b/tests/test_catalog_get_entity_details.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "details", "-t", "backend-worker"]) - assert response['tag'] == 'backend-worker', "Entity details should be returned" +def test(): + response = cli( ["catalog", "details", "-t", "cli-test-service"]) + assert response['tag'] == 'cli-test-service', "Entity details should be returned" diff --git a/tests/test_catalog_get_entity_details_hierarchy.py b/tests/test_catalog_get_entity_details_hierarchy.py index abc3fa2..a487680 100644 --- a/tests/test_catalog_get_entity_details_hierarchy.py +++ b/tests/test_catalog_get_entity_details_hierarchy.py @@ -1,6 +1,6 @@ -from common import * +from
tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "details", "-i", "groups", "-t", "sso-integration"]) - assert response['hierarchy']['parents'][0]['groups'][0] == 'public-api-test', "Entity groups should be in response" - assert response['hierarchy']['parents'][0]['parents'][0]['groups'][0] == 'public-api-test', "Parent groups should be in response" +def test(): + response = cli(["catalog", "details", "-i", "groups", "-t", "cli-test-service"]) + assert response['hierarchy']['parents'][0]['groups'][0] == 'cli-test', "Entity groups should be in response" + assert response['hierarchy']['parents'][0]['parents'][0]['groups'][0] == 'cli-test', "Parent groups should be in response" diff --git a/tests/test_catalog_include_links.py b/tests/test_catalog_include_links.py index 2d7c960..24c08ab 100644 --- a/tests/test_catalog_include_links.py +++ b/tests/test_catalog_include_links.py @@ -1,8 +1,7 @@ -from common import * +from tests.helpers.utils import * -# Too brittle if we assume only one entity has group 'include-links-test'? -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "include-links-test"]) +def test(): + response = cli(["catalog", "list", "-g", "include-links-test"]) assert (len(response['entities'][0]['links']) == 0) - response = cli_command(capsys, ["catalog", "list", "-g", "include-links-test", "-l"]) + response = cli(["catalog", "list", "-g", "include-links-test", "-l"]) assert (len(response['entities'][0]['links']) > 0) diff --git a/tests/test_catalog_include_metadata.py b/tests/test_catalog_include_metadata.py index e1c212e..6a70e9a 100644 --- a/tests/test_catalog_include_metadata.py +++ b/tests/test_catalog_include_metadata.py @@ -1,8 +1,8 @@ -from common import * +from tests.helpers.utils import * -# Too brittle if we assume only one entity has group 'include-metadata-test'? 
-def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "include-metadata-test"]) +def test(): + response = cli(["catalog", "list", "-g", "include-metadata-test"]) assert (len(response['entities'][0]['metadata']) == 0) - response = cli_command(capsys, ["catalog", "list", "-g", "include-metadata-test", "-m"]) + + response = cli(["catalog", "list", "-g", "include-metadata-test", "-m"]) assert (len(response['entities'][0]['metadata']) > 0) diff --git a/tests/test_catalog_include_nested_fields.py b/tests/test_catalog_include_nested_fields.py index 3130d96..1702e9f 100644 --- a/tests/test_catalog_include_nested_fields.py +++ b/tests/test_catalog_include_nested_fields.py @@ -1,8 +1,14 @@ -from common import * +from tests.helpers.utils import * -@pytest.mark.skipif(allow_team_entities_in_catalog_api() == False, reason="Account flag ALLOW_TEAM_ENTITIES_IN_CATALOG_API is not set") -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-io", "-in", "team:members"]) - list = [entity for entity in response['entities'] if entity['tag'] == "search-experience"] - assert not list == None, "found search-experience entity in response" +def test(): + response = cli(["catalog", "list", "-t", "team"], ReturnType.STDOUT) + + if "HTTP Error 400: Bad Request - Cannot request teams." in response: + print("This test requires feature flag ALLOW_TEAM_ENTITIES_IN_CATALOG_API, which does not appear to be set, so not running test.") + print("This flag will eventually be set for all workspaces and this check can be removed. 
However, as of June 2025 this has not been done.") + return + + response = cli(["catalog", "list", "-g", "cli-test", "-io", "-in", "team:members"]) + list = [entity for entity in response['entities'] if entity['tag'] == "cli-test-team-1"] + assert not list == None, "found an entity in response" assert len(list[0]['members']) > 0, "response has non-empty array of members" diff --git a/tests/test_catalog_invalid_page_size.py b/tests/test_catalog_invalid_page_size.py deleted file mode 100644 index 9262a7f..0000000 --- a/tests/test_catalog_invalid_page_size.py +++ /dev/null @@ -1,9 +0,0 @@ -from common import * - -def test(capsys): - with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "list", "-z", "1001"]) - out, err = capsys.readouterr() - - assert "Page size must be set between 1 and 1000; requested value: 1005" in out, "Should get error text about invalid page parameter" - assert excinfo.value.code == 400, "Page size greater than 100 should result in a Bad Request error, http code 400" diff --git a/tests/test_catalog_list_by_github_repo.py b/tests/test_catalog_list_by_github_repo.py index 195b318..2ac9f2b 100644 --- a/tests/test_catalog_list_by_github_repo.py +++ b/tests/test_catalog_list_by_github_repo.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-r", "my-org/my-repo"]) +def test(): + response = cli( ["catalog", "list", "-r", "my-org/my-repo"]) assert (response['total'] == 1) diff --git a/tests/test_catalog_list_by_group_multiple.py b/tests/test_catalog_list_by_group_multiple.py index cc7f115..0cce1ad 100644 --- a/tests/test_catalog_list_by_group_multiple.py +++ b/tests/test_catalog_list_by_group_multiple.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test-group-1,public-api-test-group-2"]) +def test(): + response = cli(["catalog", 
"list", "-g", "cli-test-group-1,cli-test-group-2"]) assert (response['total'] == 2) diff --git a/tests/test_catalog_list_by_group_single.py b/tests/test_catalog_list_by_group_single.py index 399d790..1fa785b 100644 --- a/tests/test_catalog_list_by_group_single.py +++ b/tests/test_catalog_list_by_group_single.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test-group-1"]) +def test(): + response = cli(["catalog", "list", "-g", "cli-test-group-1"]) assert (response['total'] == 1) diff --git a/tests/test_catalog_list_by_owners_multiple.py b/tests/test_catalog_list_by_owners_multiple.py index 8307bd2..efaba14 100644 --- a/tests/test_catalog_list_by_owners_multiple.py +++ b/tests/test_catalog_list_by_owners_multiple.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-o", "payments-team,search-experience"]) +def test(): + response = cli(["catalog", "list", "-o", "cli-test-team-1,cli-test-team-2"]) assert (response['total'] == 2) diff --git a/tests/test_catalog_list_by_owners_single.py b/tests/test_catalog_list_by_owners_single.py index b690010..4ba18c2 100644 --- a/tests/test_catalog_list_by_owners_single.py +++ b/tests/test_catalog_list_by_owners_single.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-o", "payments-team"]) +def test(): + response = cli(["catalog", "list", "-o", "cli-test-team-1"]) assert (response['total'] == 1) diff --git a/tests/test_catalog_list_by_types.py b/tests/test_catalog_list_by_types.py index f666b32..1081ee0 100644 --- a/tests/test_catalog_list_by_types.py +++ b/tests/test_catalog_list_by_types.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, 
["catalog", "list", "-g", "public-api-test", "-t", "component"]) - assert response['total'] > 0, "Should find at least 1 entity of type 'component'" +def test(): + response = cli(["catalog", "list", "-g", "cli-test", "-t", "service"]) + assert response['total'] > 0, "Should find at least 1 entity of type 'service'" diff --git a/tests/test_catalog_list_entity_descriptors.py b/tests/test_catalog_list_entity_descriptors.py index cada75d..c4962b0 100644 --- a/tests/test_catalog_list_entity_descriptors.py +++ b/tests/test_catalog_list_entity_descriptors.py @@ -1,6 +1,7 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list-descriptors"]) - list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "autocomplete"] - assert list[0]['info']['x-cortex-groups'][0] == "public-api-test" +def test(): + response = cli(["catalog", "list-descriptors", "-t", "service"]) + + list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "cli-test-service"] + assert list[0]['info']['x-cortex-groups'][0] == "cli-test" diff --git a/tests/test_catalog_list_entity_descriptors_page.py b/tests/test_catalog_list_entity_descriptors_page.py index 1d38a1b..170c97a 100644 --- a/tests/test_catalog_list_entity_descriptors_page.py +++ b/tests/test_catalog_list_entity_descriptors_page.py @@ -1,5 +1,8 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list-descriptors", "-t", "component", "-p", "0", "-z", "1"]) - assert response['descriptors'][0]['info']['x-cortex-tag'] == "backend-worker" +def test(): + response = cli(["catalog", "list-descriptors", "-t", "service", "-p", "0", "-z", "1"]) + + # YAML descriptor has single quotes, so cannot read it as valid JSON. First convert to double quotes. 
+ json_data = json.loads(str(response).replace("'", "\"")) + assert len(json_data['descriptors']) == 1, "exactly one descriptor is returned" diff --git a/tests/test_catalog_list_entity_descriptors_page_size.py b/tests/test_catalog_list_entity_descriptors_page_size.py index 0c8446d..eedc1b3 100644 --- a/tests/test_catalog_list_entity_descriptors_page_size.py +++ b/tests/test_catalog_list_entity_descriptors_page_size.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list-descriptors", "-t", "component", "-z", "1"]) +def test(): + response = cli(["catalog", "list-descriptors", "-t", "service", "-p", "0", "-z", "1"]) assert (len(response['descriptors']) == 1) diff --git a/tests/test_catalog_list_entity_descriptors_yaml.py b/tests/test_catalog_list_entity_descriptors_yaml.py index 057c176..383974b 100644 --- a/tests/test_catalog_list_entity_descriptors_yaml.py +++ b/tests/test_catalog_list_entity_descriptors_yaml.py @@ -1,6 +1,6 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list-descriptors", "-y", "-t", "component"]) - list = [descriptor for descriptor in response['descriptors'] if yaml.safe_load(descriptor)['info']['x-cortex-tag'] == "backend-worker"] - assert yaml.safe_load(list[0])['info']['x-cortex-custom-metadata']['cicd'] == "circle-ci" +def test(): + response = cli(["catalog", "list-descriptors", "-y", "--types", "service"]) + list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "cli-test-service"] + assert list[0]['info']['x-cortex-custom-metadata']['cicd'] == "circle-ci" diff --git a/tests/test_catalog_list_include_archived.py b/tests/test_catalog_list_include_archived.py index b572e33..a9b33e4 100644 --- a/tests/test_catalog_list_include_archived.py +++ b/tests/test_catalog_list_include_archived.py @@ -1,8 +1,10 @@ -from common import * +from 
tests.helpers.utils import * def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-z", "500"]) - assert not any(entity['tag'] == 'robot-item-sorter' for entity in response['entities']), "Should not find archived entity" + response = cli(["catalog", "archive", "-t", "cli-test-archive-entity"]) - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-a", "-z", "500"]) - assert any(entity['tag'] == 'robot-item-sorter' for entity in response['entities']), "Should find archived entity" + response = cli(["catalog", "list", "-g", "cli-test", "-z", "500"]) + assert not any(entity['tag'] == 'cli-test-archive-entity' for entity in response['entities']), "Should not find archived entity" + + response = cli(["catalog", "list", "-g", "cli-test", "-a", "-z", "500"]) + assert any(entity['tag'] == 'cli-test-archive-entity' for entity in response['entities']), "Should find archived entity" diff --git a/tests/test_catalog_list_include_owners.py b/tests/test_catalog_list_include_owners.py index 5c339b3..03b8cea 100644 --- a/tests/test_catalog_list_include_owners.py +++ b/tests/test_catalog_list_include_owners.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-io"]) + response = cli(["catalog", "list", "-g", "cli-test", "-io"]) assert not(response['entities'][0]['owners']['teams'] is None), "Teams array should be returned in result" diff --git a/tests/test_catalog_list_page.py b/tests/test_catalog_list_page.py index 8c7d7b8..b8d29fa 100644 --- a/tests/test_catalog_list_page.py +++ b/tests/test_catalog_list_page.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-p", "0"]) + response = cli(["catalog", "list", "-g", "cli-test", "-p", "0"]) assert (len(response['entities']) > 0) 
diff --git a/tests/test_catalog_list_page_size.py b/tests/test_catalog_list_page_size.py index a91113c..046fd47 100644 --- a/tests/test_catalog_list_page_size.py +++ b/tests/test_catalog_list_page_size.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-z", "1"]) +def test(): + response = cli(["catalog", "list", "-g", "cli-test", "-p", "0", "-z", "1"]) assert (len(response['entities']) == 1) diff --git a/tests/test_catalog_patch_entity.py b/tests/test_catalog_patch_entity.py index bf967a1..70c76a6 100644 --- a/tests/test_catalog_patch_entity.py +++ b/tests/test_catalog_patch_entity.py @@ -1,18 +1,7 @@ -from common import * +from tests.helpers.utils import * def test(capsys): - cli(["-q", "catalog", "patch", "-f", "data/run-time/create-patch-entity.yaml"]) - # Need to clear captured system output from the above commands to clear the way for the next one. - capsys.readouterr() + cli(["catalog", "patch", "-a", "-f", "data/run-time/patch-entity.yaml"]) - response = cli_command(capsys, ["catalog", "descriptor", "-t", "patch-entity"]) - assert response['info']['x-cortex-tag'] == "patch-entity" - - # Need to clear captured system output from the above commands to clear the way for the next one. 
- capsys.readouterr() - - cli(["-q", "catalog", "patch", "-a", "-f", "data/run-time/patch-entity.yaml"]) - capsys.readouterr() - - response = cli_command(capsys, ["custom-data", "get", "-t", "patch-entity", "-k", "owners"]) + response = cli(["custom-data", "get", "-t", "cli-test-patch-entity", "-k", "owners"]) assert 'owner-2' in response['value'], "owner-2 should have been merged in owners array" diff --git a/tests/test_catalog_retrieve_entity_descriptor.py b/tests/test_catalog_retrieve_entity_descriptor.py index 6a1dab3..7f687a4 100644 --- a/tests/test_catalog_retrieve_entity_descriptor.py +++ b/tests/test_catalog_retrieve_entity_descriptor.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "descriptor", "-t", "backend-worker"]) - assert response['info']['x-cortex-tag'] == "backend-worker" +def test(): + response = cli(["catalog", "descriptor", "-t", "cli-test-service"]) + assert response['info']['x-cortex-tag'] == "cli-test-service" diff --git a/tests/test_catalog_retrieve_entity_descriptor_yaml.py b/tests/test_catalog_retrieve_entity_descriptor_yaml.py index 7ee95e3..2f323d0 100644 --- a/tests/test_catalog_retrieve_entity_descriptor_yaml.py +++ b/tests/test_catalog_retrieve_entity_descriptor_yaml.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "descriptor", "-y", "-t", "backend-worker"], "text") - assert yaml.safe_load(response)['info']['x-cortex-tag'] == "backend-worker" +def test(): + response = cli(["catalog", "descriptor", "-y", "-t", "cli-test-service"], ReturnType.STDOUT) + assert yaml.safe_load(response)['info']['x-cortex-tag'] == "cli-test-service" diff --git a/tests/test_catalog_retrieve_entity_details.py b/tests/test_catalog_retrieve_entity_details.py index 0380f48..4af0be7 100644 --- a/tests/test_catalog_retrieve_entity_details.py +++ 
b/tests/test_catalog_retrieve_entity_details.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "details", "-t", "backend-worker"]) - assert response['tag'] == "backend-worker" +def test(): + response = cli(["catalog", "details", "-t", "cli-test-service"]) + assert response['tag'] == "cli-test-service" diff --git a/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py b/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py index dee7ede..c8e1149 100644 --- a/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py +++ b/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "details", "-t", "backend-worker", "-i", "groups"]) - assert response['tag'] == "backend-worker" +def test(): + response = cli(["catalog", "details", "-t", "cli-test-service", "-i", "groups"]) + assert response['tag'] == "cli-test-service" diff --git a/tests/test_catalog_retrieve_entity_scorecard_scores.py b/tests/test_catalog_retrieve_entity_scorecard_scores.py index 8097d64..d382457 100644 --- a/tests/test_catalog_retrieve_entity_scorecard_scores.py +++ b/tests/test_catalog_retrieve_entity_scorecard_scores.py @@ -1,7 +1,7 @@ -from common import * +from tests.helpers.utils import * @pytest.mark.skip(reason="Cannot rely on scorecard to have been evaluated. 
Need FR to force evaluation?") -def test(capsys): - response = cli_command(capsys, ["catalog", "scorecard-scores", "-t", "backend-worker"]) +def test(): + response = cli(["catalog", "scorecard-scores", "-t", "backend-worker"]) list = [scorecard for scorecard in response if scorecard['scorecardName'] == "Public API Test Production Readiness"] assert list[0]['score'] == 1 diff --git a/tests/test_catalog_unarchive_entity.py b/tests/test_catalog_unarchive_entity.py index b3a455c..102a506 100644 --- a/tests/test_catalog_unarchive_entity.py +++ b/tests/test_catalog_unarchive_entity.py @@ -1,13 +1,10 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli(["-q", "catalog", "create", "-f", "data/run-time/unarchive-entity.yaml"]) - cli(["-q", "catalog", "archive", "-t", "unarchive-entity"]) - # Need to clear captured system output from the above commands to clear the way for the next one. - capsys.readouterr() +def test(): + cli(["catalog", "archive", "-t", "cli-test-unarchive-entity"]) - response = cli_command(capsys, ["catalog", "details", "-t", "unarchive-entity"]) + response = cli(["catalog", "details", "-t", "cli-test-unarchive-entity"]) assert response['isArchived'] == True, "isArchived attribute should be true" - response = cli_command(capsys, ["catalog", "unarchive", "-t", "unarchive-entity"]) + response = cli(["catalog", "unarchive", "-t", "cli-test-unarchive-entity"]) assert response['isArchived'] == False, "isArchived attribute should not be true" diff --git a/tests/test_config_file.py b/tests/test_config_file.py index cc8f75a..6455f58 100644 --- a/tests/test_config_file.py +++ b/tests/test_config_file.py @@ -1,3 +1,4 @@ +from tests.helpers.utils import * """ Tests for the cortex CLI config file """ @@ -9,7 +10,6 @@ # Additionally, order is VERY IMPORTANT in this file because of the way CORTEX_API key is # deleted, set to invalid values, etc. Moving test order could impact the overall success # of pytest. Tread carefully here. 
-from cortexapps_cli.cortex import cli import io import os @@ -32,73 +32,25 @@ def test_config_file_api_key_quotes(tmp_path): api_key = "${cortex_api_key}" """) content = template.substitute(cortex_api_key=cortex_api_key) - print(content) f.write_text(content) - cli(["-c", str(f), "teams", "list"]) + cli(["-c", str(f), "entity-types", "list"]) @pytest.mark.serial -def test_environment_variables(capsys): - cli(["teams", "list"]) - out, err = capsys.readouterr() - #print(out) - print("ERR = " + err) - assert err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY", "Warning should be displayed by default" - - cli(["-q", "teams", "list"]) - out, err = capsys.readouterr() - assert not(err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY"), "Warning should be displayed with -q option" - -@pytest.mark.serial -def test_config_file_create(monkeypatch, tmp_path, delete_cortex_api_key): - with pytest.raises(SystemExit) as excinfo: - monkeypatch.setattr('sys.stdin', io.StringIO('Y')) - f = tmp_path / "test-config.txt" - cli(["-c", str(f), "catalog", "list"]) - -@pytest.mark.serial -def test_config_file_new(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config" - content = """ - [default] - api_key = REPLACE_WITH_YOUR_CORTEX_API_KEY - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "teams", "list"]) - out, err = capsys.readouterr() - -@pytest.mark.serial -def test_export(capsys, delete_cortex_api_key): - cli(["-t", "rich-sandbox", "backup", "export"]) - out, err = capsys.readouterr() - last_line = out.strip().split("\n")[-1] - sys.stdout.write(out + "\n\n") - sys.stdout.write(last_line + "\n\n") - assert "rich-sandbox" in out - - export_directory = last_line.replace("Contents available in ", "") - - assert len(os.listdir(export_directory + "/catalog")) > 0, "catalog directory has files" - assert len(os.listdir(export_directory + "/scorecards")) > 0, "scorecards 
directory has files" - assert len(os.listdir(export_directory + "/resource-definitions")) > 0, "resource-definitions directory has files" +def test_config_file_create(monkeypatch, tmp_path): + monkeypatch.setattr('sys.stdin', io.StringIO('y')) + f = tmp_path / "test-config.txt" + response = cli(["-c", str(f), "-k", os.getenv('CORTEX_API_KEY'), "scorecards", "list"]) + assert any(scorecard['tag'] == 'cli-test-scorecard' for scorecard in response['scorecards']), "Should find scorecard with tag cli-test-scorecard" @pytest.mark.serial -def test_config_file_bad_api_key(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config_bad_api_key" - content = """ - [default] - api_key = invalidApiKey - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "catalog", "list", "-t", "backend-worker"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" +def test_config_file_bad_api_key(monkeypatch, tmp_path, delete_cortex_api_key): + monkeypatch.setattr('sys.stdin', io.StringIO('y')) + f = tmp_path / "test-config-bad-api-key.txt" + response = cli(["-c", str(f), "-k", "invalidApiKey", "scorecards", "list"], return_type=ReturnType.RAW) + assert "401 Client Error: Unauthorized" in str(response), "should get Unauthorized error" @pytest.mark.serial -def test_environment_variable_invalid_key(capsys): - with pytest.raises(SystemExit) as excinfo: - os.environ["CORTEX_API_KEY"] = "invalidKey" - cli(["teams", "list"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" +def test_environment_variable_invalid_key(): + os.environ["CORTEX_API_KEY"] = "invalidKey" + response = cli(["scorecards", "list"], return_type=ReturnType.RAW) + assert "401 Client Error: Unauthorized" in str(response), "should get Unauthorized error" diff --git a/tests/test_conftest.py 
b/tests/test_conftest.py new file mode 100644 index 0000000..3d0e906 --- /dev/null +++ b/tests/test_conftest.py @@ -0,0 +1,27 @@ +import pytest +import inspect + +#@pytest.mark.trylast +@pytest.hookimpl(trylast=True) +def pytest_configure(config): + terminal_reporter = config.pluginmanager.getplugin('terminalreporter') + config.pluginmanager.register(TestDescriptionPlugin(terminal_reporter), 'testdescription') + +class TestDescriptionPlugin: + + def __init__(self, terminal_reporter): + self.terminal_reporter = terminal_reporter + self.desc = None + + def pytest_runtest_protocol(self, item): + self.desc = inspect.getdoc(item.obj) + + @pytest.hookimpl(hookwrapper=True, tryfirst=True) + def pytest_runtest_logstart(self, nodeid, location): + if self.terminal_reporter.verbosity == 0: + yield + else: + self.terminal_reporter.write('\n') + yield + if self.desc: + self.terminal_reporter.write(f'\n{self.desc} ') diff --git a/tests/test_custom_data_create_or_update_in_bulk.py b/tests/test_custom_data_create_or_update_in_bulk.py index 75653b8..d033207 100644 --- a/tests/test_custom_data_create_or_update_in_bulk.py +++ b/tests/test_custom_data_create_or_update_in_bulk.py @@ -1,13 +1,12 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli(["-q", "custom-data", "bulk", "-f", "data/run-time/custom-data-bulk.json"]) - capsys.readouterr() +def test(): + cli(["custom-data", "bulk", "-f", "data/run-time/custom-data-bulk.json"]) - response = cli_command(capsys, ["catalog", "details", "-t", "backend-worker"]) - list = [metadata for metadata in response['metadata'] if metadata['key'] == "bulk-key-1"] + result = cli(["catalog", "details", "-t", "cli-test-service-caller"]) + list = [metadata for metadata in result['metadata'] if metadata['key'] == "bulk-key-1"] assert list[0]['value'] == "value-1" - response = cli_command(capsys, ["catalog", "details", "-t", "ach-payments-nacha"]) - list = [metadata for metadata in response['metadata'] if metadata['key'] == 
"bulk-key-4"] + result = cli( ["catalog", "details", "-t", "cli-test-service-callee"]) + list = [metadata for metadata in result['metadata'] if metadata['key'] == "bulk-key-4"] assert list[0]['value'] == "value-4" diff --git a/tests/test_custom_data_delete.py b/tests/test_custom_data_delete.py index 98ecae9..a787c13 100644 --- a/tests/test_custom_data_delete.py +++ b/tests/test_custom_data_delete.py @@ -1,12 +1,12 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli_command(capsys, ["custom-data", "add", "-t", "recommendations", "-f", "data/run-time/custom-data-delete.json"]) +def test(): + cli(["custom-data", "add", "-t", "cli-test-service", "-f", "data/run-time/custom-data-delete.json"]) - response = cli_command(capsys, ["custom-data", "get", "-t", "recommendations", "-k", "delete-me"]) - assert response['value'] == "yes" + result = cli(["custom-data", "get", "-t", "cli-test-service", "-k", "delete-me"]) + assert result['value'] == "yes" - cli(["-q", "custom-data", "delete", "-t", "recommendations", "-k", "delete-me"]) + cli(["custom-data", "delete", "-t", "cli-test-service", "-k", "delete-me"]) - response = cli_command(capsys, ["catalog", "details", "-t", "recommendations"]) - assert not any(metadata['key'] == 'delete-me' for metadata in response['metadata']) + result = cli(["catalog", "details", "-t", "cli-test-service"]) + assert not any(metadata['key'] == 'delete-me' for metadata in result['metadata']) diff --git a/tests/test_custom_data_list.py b/tests/test_custom_data_list.py index 259fd49..dca0e8d 100644 --- a/tests/test_custom_data_list.py +++ b/tests/test_custom_data_list.py @@ -1,6 +1,6 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["catalog", "details", "-t", "backend-worker"]) - list = [metadata for metadata in response['metadata'] if metadata['key'] == "cicd"] +def test(): + result = cli(["catalog", "details", "-t", "cli-test-service"]) + list = 
[metadata for metadata in result['metadata'] if metadata['key'] == "cicd"] assert list[0]['value'] == "circle-ci" diff --git a/tests/test_custom_events_list.py b/tests/test_custom_events_list.py index e406bd9..cc0a555 100644 --- a/tests/test_custom_events_list.py +++ b/tests/test_custom_events_list.py @@ -1,15 +1,14 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli(["-q", "custom-events", "delete-all", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) - cli(["-q", "custom-events", "create", "-t", "transaction-store", "-f", "data/run-time/custom-events.json"]) - capsys.readouterr() +def test(): + cli(["custom-events", "delete-all", "-t", "cli-test-service", "-y", "VALIDATE_SERVICE"]) + cli(["custom-events", "create", "-t", "cli-test-service", "-f", "data/run-time/custom-events.json"]) - response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store"]) - assert response['events'][0]['type'] == "VALIDATE_SERVICE" + result = cli(["custom-events", "list", "-t", "cli-test-service"]) + assert result['events'][0]['type'] == "VALIDATE_SERVICE" - response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) - assert response['events'][0]['type'] == "VALIDATE_SERVICE" + result = cli(["custom-events", "list", "-t", "cli-test-service", "-y", "VALIDATE_SERVICE"]) + assert result['events'][0]['type'] == "VALIDATE_SERVICE" - response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE", "-i", "2023-10-10T13:27:51.226"]) - assert response['events'][0]['type'] == "VALIDATE_SERVICE" + result = cli(["custom-events", "list", "-t", "cli-test-service", "-y", "VALIDATE_SERVICE", "-ts", "2023-10-10T13:27:51"]) + assert result['events'][0]['type'] == "VALIDATE_SERVICE" diff --git a/tests/test_custom_events_uuid.py b/tests/test_custom_events_uuid.py index 507c1d5..40842d4 100644 --- a/tests/test_custom_events_uuid.py +++ 
b/tests/test_custom_events_uuid.py @@ -1,26 +1,24 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["custom-events", "create", "-t", "warehousing", "-f", "data/run-time/custom-events-configure.json"]) - uuid = response['uuid'] +def test(): + result = cli(["custom-events", "create", "-t", "cli-test-service", "-f", "data/run-time/custom-events-configure.json"]) + uuid = result['uuid'] - cli_command(capsys, ["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) - assert response['type'] == "CONFIG_SERVICE" + result = cli(["custom-events", "get-by-uuid", "-t", "cli-test-service", "-u", uuid]) + assert result['type'] == "CONFIG_SERVICE" - cli(["-q", "custom-events", "update-by-uuid", "-t", "warehousing", "-u", uuid, "-f", "data/run-time/custom-events.json"]) - capsys.readouterr() + cli(["custom-events", "update-by-uuid", "-t", "cli-test-service", "-u", uuid, "-f", "data/run-time/custom-events.json"]) - response = cli_command(capsys, ["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) - assert response['type'] == "VALIDATE_SERVICE" + result = cli(["custom-events", "get-by-uuid", "-t", "cli-test-service", "-u", uuid]) + assert result['type'] == "VALIDATE_SERVICE" - cli(["-q", "custom-events", "delete-by-uuid", "-t", "warehousing", "-u", uuid]) + cli(["custom-events", "delete-by-uuid", "-t", "cli-test-service", "-u", uuid]) # Custom event was deleted, so verify it cannot be retrieved. 
- with pytest.raises(SystemExit) as excinfo: - cli(["-q", "custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) - out, err = capsys.readouterr() + # with pytest.raises(SystemExit) as excinfo: + result = cli(["custom-events", "get-by-uuid", "-t", "cli-test-service", "-u", uuid], ReturnType.RAW) + out = result.stdout + assert "HTTP Error 404: Not Found" in out, "An HTTP 404 error code should be thrown" + assert result.exit_code == 1 - assert out == "Bad Request" - assert excinfo.value.code == 144 - - cli(["-q", "custom-events", "delete-all", "-t", "warehousing"]) + cli(["custom-events", "delete-all", "-t", "cli-test-service"]) diff --git a/tests/test_custom_metrics.py b/tests/test_custom_metrics.py new file mode 100644 index 0000000..7074b39 --- /dev/null +++ b/tests/test_custom_metrics.py @@ -0,0 +1,29 @@ +from tests.helpers.utils import * + +# As part of this testing, filed: +# CET-19691: custom metrics POST API returns 200 response for un-processed metrics older than 6 months + +def test(): + result = cli(["custom-metrics", "get", "-t", "cli-test-service", "-k", "vulnerabilities"], ReturnType.STDOUT) + + if "HTTP Error 403: Product access to [ENG_METRICS] not permitted" in result: + print("API key does not have access to custom metrics or feature not enabled in tenant, not running tests.") + return + + # No API support to create a custom metric. It can only be done in the UI, so check if this workspace has the + # 'vulnerabilities' custom metric defined. + result = cli(["custom-metrics", "get", "-t", "cli-test-service", "-k", "vulnerabilities"], ReturnType.STDOUT) + if "HTTP Error 404: Not Found - CustomMetricKey not found" in result: + print("Custom metric named 'vulnerabilities' does not exist. 
It has to be created in the UI for this test to run.") + print("To create: Settings -> Eng Intelligence -> General -> Custom -> Add Metric -> (select API toggle).") + return + + date = today() + cli(["custom-metrics", "delete", "-t", "cli-test-service", "-k", "vulnerabilities", "-s", "2022-01-01T00:00:00", "-e", today()]) + cli(["custom-metrics", "add", "-t", "cli-test-service", "-k", "vulnerabilities", "-v", "3.0"]) + result = cli(["custom-metrics", "get", "-t", "cli-test-service", "-k", "vulnerabilities"]) + assert result['data'][0]['value'] == 3.0, "should have single value of 3.0" + + cli(["custom-metrics", "add-in-bulk", "-t", "cli-test-service", "-k", "vulnerabilities", "-v", f"{date}=1.0", "-v", f"{date}=2.0"]) + result = cli(["custom-metrics", "get", "-t", "cli-test-service", "-k", "vulnerabilities"]) + assert result['total'] == 3, "should have total of 3 metrics data points" diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py index d59f863..a9b0eb7 100644 --- a/tests/test_dependencies.py +++ b/tests/test_dependencies.py @@ -1,26 +1,25 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - callerTag = "fraud-analyzer" - calleeTag = "backend-worker" +def test(): + callerTag = "cli-test-service-caller" + calleeTag = "cli-test-service-callee" - cli(["-q", "dependencies", "delete-all", "-r", callerTag]) + cli(["dependencies", "delete-all", "-r", callerTag]) - cli_command(capsys, ["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) + cli(["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) - cli_command(capsys, ["dependencies", "add", "-r", callerTag, "-e", - calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies.json"]) - cli_command, (["dependencies", "update", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies-update.json"]) - response = cli_command(capsys, ["dependencies", 
"get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) - assert response["callerTag"] == callerTag, "callerTag should be " + callerTag - assert response["calleeTag"] == calleeTag, "calleeTag should be " + calleeTag + cli(["dependencies", "create", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs"]) + cli(["dependencies", "update", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies-update.json"]) + result = cli(["dependencies", "get", "-r", "cli-test-service-caller", "-e", "cli-test-service-callee", "-m", "GET", "-p", "/api/v1/github/configurations"]) + assert result["callerTag"] == callerTag, "callerTag should be " + callerTag + assert result["calleeTag"] == calleeTag, "calleeTag should be " + calleeTag - cli_command(capsys, ["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) + cli(["dependencies", "get", "-r", "cli-test-service-caller", "-e", "cli-test-service-callee", "-m", "GET", "-p", "/api/v1/github/configurations"]) - response = cli_command(capsys, ["dependencies", "get-all", "-r", "fraud-analyzer", "-o"]) - assert any(dependency['callerTag'] == callerTag and dependency['path'] == "/api/v1/github/configurations" for dependency in response["dependencies"]) + result = cli(["dependencies", "get-all", "-r", "cli-test-service-caller", "-o"]) + assert any(dependency['callerTag'] == callerTag and dependency['path'] == "/api/v1/github/configurations" for dependency in result["dependencies"]) - cli(["-q", "dependencies", "delete", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/audit-logs"]) - cli(["-q", "dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) - cli(["-q", "dependencies", "delete-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) - cli(["-q", "dependencies", "delete-all", "-r", 
"fraud-analyzer"]) + cli(["dependencies", "delete", "-r", "cli-test-service-caller", "-e", "cli-test-service-callee", "-m", "GET", "-p", "/api/v1/audit-logs"]) + cli(["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) + cli(["dependencies", "delete-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) + cli(["dependencies", "delete-all", "-r", "cli-test-service-caller"]) diff --git a/tests/test_deploys.py b/tests/test_deploys.py index 1913148..5583489 100644 --- a/tests/test_deploys.py +++ b/tests/test_deploys.py @@ -1,44 +1,68 @@ -from common import * +from tests.helpers.utils import * -def _add_deploy(capsys): - cli_command(capsys, ["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys.json"]) +def _add_deploy(): + cli(["deploys", "add", "-t", "cli-test-service", "-f", "data/run-time/deploys.json"]) -def _delete_all(capsys): - cli_command(capsys, ["deploys", "delete-all"]) - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) - assert len(response['deployments']) == 0, "All deployments for entity should be deleted" +def _delete_all(): + cli(["deploys", "delete-all"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) + assert len(result['deployments']) == 0, "All deployments for entity should be deleted" -def test_deploys(capsys): - _delete_all(capsys) +def test_deploys(): + _delete_all() - response = cli_command(capsys, ["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys-uuid.json"]) - uuid = response['uuid'] + result = cli(["deploys", "add", "-t", "cli-test-service", "-f", "data/run-time/deploys-uuid.json"]) + uuid = result['uuid'] - # Add a second deploy. 
- _add_deploy(capsys) + print("uuid = " + uuid) - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) - assert any(deploy['uuid'] == uuid for deploy in response['deployments']), "Should find a deploy with uuid" - assert response['total'] == 2, "Two deploys should be returned for entity" + _add_deploy() + result = cli(["deploys", "list", "-t", "cli-test-service"]) + assert any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should find a deploy with uuid" + assert result['total'] == 2, "Two deploys should be returned for entity" - cli_command(capsys, ["deploys", "update-by-uuid", "-t", "shipping-integrations", "-u", uuid, "-f", "data/run-time/deploys-update.json"]) - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) - deploy = [deploy for deploy in response['deployments'] if deploy['uuid'] == uuid] + cli(["deploys", "update-by-uuid", "-t", "cli-test-service", "-u", uuid, "-f", "data/run-time/deploys-update.json"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) + deploy = [deploy for deploy in result['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-456789", "Should find a deploy with sha" + + cli(["deploys", "delete-by-uuid", "-t", "cli-test-service", "-u", uuid]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) + assert not any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should not find a deploy with uuid" + assert result['total'] == 1, "Following delete-by-uuid, only one deploy should be returned for entity" - cli_command(capsys, ["deploys", "delete-by-uuid", "-t", "shipping-integrations", "-u", uuid]) - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) - assert not any(deploy['uuid'] == uuid for deploy in response['deployments']), "Should not find a deploy with uuid" - assert response['total'] == 1, "Following delete-by-uuid, only one deploy should be returned for entity" + 
_add_deploy() + cli(["deploys", "delete", "-t", "cli-test-service", "-s", "SHA-123456"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) + assert not any(deploy['sha'] == "SHA-123456" for deploy in result['deployments']), "Should not find a deploy with sha that was deleted" - _add_deploy(capsys) - cli_command(capsys, ["deploys", "delete", "-t", "shipping-integrations", "-s", "SHA-123456"]) - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) - assert not any(deploy['sha'] == "SHA-123456" for deploy in response['deployments']), "Should not find a deploy with sha that was deleted" + _add_deploy() + cli(["deploys", "delete-by-filter", "-ty", "DEPLOY"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) + assert not any(deploy['type'] == "DEPLOY" for deploy in result['deployments']), "Should not find a deploy type 'DEPLOY' that was deleted" - _add_deploy(capsys) - cli_command(capsys, ["deploys", "delete-filter", "-y", "DEPLOY"]) - assert not any(deploy['type'] == "DEPLOY" for deploy in response['deployments']), "Should not find a deploy type 'DEPLOY' sha that was deleted" + result = cli(["deploys", "add", + "-t", "cli-test-service", + "--email", "julien@tpb.com", + "--name", "Julien", + "--environment", "PYPI.org", + "--sha", "SHA-123456", + "--title", "my title", + "--type", "DEPLOY", + "--url", "https://tpb.com", + "-c", "abc=123", + "-c", "def=456"]) + uuid = result['uuid'] + result = cli(["deploys", "list", "-t", "cli-test-service"]) + deploy = [deploy for deploy in result['deployments'] if deploy['uuid'] == uuid] + assert deploy[0]['sha'] == "SHA-123456", "Should find a deploy with sha" + assert deploy[0]['deployer']['email'] == "julien@tpb.com", "Email should be set for deploy" + assert deploy[0]['deployer']['name'] == "Julien", "Name should be set for deploy" + assert deploy[0]['environment'] == "PYPI.org", "environment should be set for deploy" + assert deploy[0]['title'] == "my title", "title should be 
set for deploy" + assert deploy[0]['type'] == "DEPLOY", "type should be set for deploy" + assert deploy[0]['customData']['abc'] == "123", "Custom data field should be populated" + assert deploy[0]['customData']['def'] == "456", "Custom data field should be populated" - _add_deploy(capsys) - _delete_all(capsys) + _add_deploy() + _delete_all() diff --git a/tests/test_discovery_audit_filter_on_source.py b/tests/test_discovery_audit_filter_on_source.py new file mode 100644 index 0000000..4ac4d5f --- /dev/null +++ b/tests/test_discovery_audit_filter_on_source.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + result = cli(["discovery-audit", "get", "-s", "GITHUB"]) + diff --git a/tests/test_discovery_audit_filter_on_type.py b/tests/test_discovery_audit_filter_on_type.py new file mode 100644 index 0000000..b2e5199 --- /dev/null +++ b/tests/test_discovery_audit_filter_on_type.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + result = cli(["discovery-audit", "get", "-ty", "NEW_REPOSITORY"]) + diff --git a/tests/test_discovery_audit_get.py b/tests/test_discovery_audit_get.py new file mode 100644 index 0000000..ed3f18a --- /dev/null +++ b/tests/test_discovery_audit_get.py @@ -0,0 +1,4 @@ +from tests.helpers.utils import * + +def test(): + result = cli(["discovery-audit", "get"]) diff --git a/tests/test_discovery_audit_get_include_ignored.py b/tests/test_discovery_audit_get_include_ignored.py new file mode 100644 index 0000000..d06b72b --- /dev/null +++ b/tests/test_discovery_audit_get_include_ignored.py @@ -0,0 +1,4 @@ +from tests.helpers.utils import * + +def test(): + result = cli(["discovery-audit", "get", "-ii"]) diff --git a/tests/test_docs.py b/tests/test_docs.py index 495c875..48a1014 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -1,18 +1,15 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli_command(capsys, ["catalog", "create", "-f", "data/run-time/docs-entity.yaml"]) +def 
test_docs(): + cli(["docs", "update", "-t", "cli-test-service", "-f", "data/run-time/docs.yaml"]) - cli_command(capsys, ["docs", "update", "-t", "docs-entity", "-f", "data/run-time/docs.yaml"]) + response = cli(["docs", "get", "-t", "cli-test-service"]) + spec = json.loads(response['spec']) + assert spec['info']['title'] == "Simple API overview", "Returned spec should have a title named 'Simple API overview'" - response = cli_command(capsys, ["docs", "get", "-t", "docs-entity"]) - spec = yaml.safe_load(response['spec']) - assert spec['info']['title'] == "Simple API overview", "API spec should have been retrieved" + cli(["docs", "delete", "-t", "cli-test-service"]) - cli_command(capsys, ["-q", "docs", "delete", "-t", "docs-entity"], "none") - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "docs", "get", "-t", "docs-entity"]) - out, err = capsys.readouterr() - - assert out == "Not Found" - assert excinfo.value.code == 404 + result = cli(["docs", "get", "-t", "cli-test-service"], ReturnType.RAW) + out = result.stdout + assert "HTTP Error 404: Not Found" in out, "An HTTP 404 error code should be thrown" + assert result.exit_code == 1 diff --git a/tests/test_entity_types.py b/tests/test_entity_types.py index 7073cd5..8b2d65b 100644 --- a/tests/test_entity_types.py +++ b/tests/test_entity_types.py @@ -1,26 +1,17 @@ -from common import * -# Entity Types were previously known as resource definitions. That's why the CLI -# command is 'resource-definitions'. CLI will likely be updated in the future to -# deprecate this. 
+from tests.helpers.utils import * -def test(capsys): - entity_type = "public-api-type-empty-schema" - response = cli_command(capsys, ["resource-definitions", "list"]) +def test_resource_definitions(capsys): + response = cli(["entity-types", "list"]) + entity_types = response['definitions'] + assert any(definition['type'] == 'cli-test' for definition in entity_types), "Should find entity type named 'cli-test'" - if any(entity['type'] == entity_type for entity in response['definitions']): - cli(["-q", "catalog", "delete-by-type", "-t", entity_type]) - cli(["-q", "resource-definitions", "delete", "-t", entity_type]) + if any(definition['type'] == 'cli-test' for definition in entity_types): + cli(["entity-types", "delete", "-t", "cli-test"]) + cli(["entity-types", "create", "-f", "data/import/entity-types/cli-test.json"]) - cli_command(capsys, ["resource-definitions", "create", "-f", "data/run-time/create-entity-type-empty-schema.json"]) + response = cli(["entity-types", "list"]) + assert any(definition['type'] == 'cli-test' for definition in response['definitions']), "Should find entity type named 'cli-test'" - response = cli_command(capsys, ["resource-definitions", "list"]) - assert any(entity['type'] == entity_type for entity in response['definitions']), "Entity type should be returned in list" + cli(["entity-types", "get", "-t", "cli-test"]) - response = cli_command(capsys, ["resource-definitions", "get", "-t", entity_type]) - assert response['type'] == entity_type, "Type of returned entity type should be " + entity_type + "." 
- - cli_command(capsys, ["resource-definitions", "update", "-t", entity_type, "-f", "data/run-time/update-entity-type-empty-schema.json"]) - - response = cli_command(capsys, ["resource-definitions", "get", "-t", entity_type]) - assert response['name'] == "Public API Type With Empty Schema -- Update", "Name should be updated for entity type" - cli(["-q", "catalog", "delete-by-type", "-t", entity_type]) + cli(["entity-types", "update", "-t", "cli-test", "-f", "data/run-time/entity-type-update.json"]) diff --git a/tests/test_export.py b/tests/test_export.py new file mode 100644 index 0000000..dcc4470 --- /dev/null +++ b/tests/test_export.py @@ -0,0 +1,4 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["backup", "export", "-e", "workflows,scorecards"], ReturnType.STDOUT) diff --git a/tests/test_github.py b/tests/test_github.py deleted file mode 100644 index 1d9a59d..0000000 --- a/tests/test_github.py +++ /dev/null @@ -1,19 +0,0 @@ -from common import * -from cortex_github import * - -# I don't think these tests can reliably run in parallel. 
Can result in PyGitHub reporting errors like this: -# FAILED tests/test_github_cortex_yaml_in_root.py::test -# github.GithubException.GithubException: 409 {"message": "is at ef660f9 but expected 418d7ec", "documentation_url": -@pytest.mark.skipif(enable_ui_editing("SERVICE") == True, reason="Account flag ENABLE_UI_EDITING for SERVICE is true.") -@pytest.mark.skipif(os.getenv('CORTEX_ENV') != "staging" or os.getenv('CORTEX_TENANT') != "jeff-sandbox", reason="To prevent git commit clashes, the test for cortex.yaml in the root will only run for main API test in staging") -def test_github_cortex_yaml_in_root(capsys): - assert gitops_add(capsys, "data/run-time/gitops.tmpl", "cortex.yaml") == True, "failed to find commit in gitops-logs" - - response = cli_command(capsys, ["catalog", "details", "-t", "gitops-entity"]) - assert response['tag'] == "gitops-entity", "Entity details can be retrieved for gitops entity" - -@pytest.mark.skipif(enable_ui_editing("SERVICE") == True, reason="Account flag ENABLE_UI_EDITING for SERVICE is true.") -def test_github_entity_in_dot_cortex(capsys): - assert gitops_add(capsys, "data/run-time/gitops-catalog.tmpl", ".cortex/catalog/" + os.getenv('CORTEX_ENV') + "-" + os.getenv('CORTEX_TENANT') + "-gitops-catalog.yaml") == True, "failed to find commit in gitops-logs" - response = cli_command(capsys, ["catalog", "details", "-t", os.getenv('CORTEX_ENV') + "-" + os.getenv('CORTEX_TENANT') + "-gitops-catalog"]) - assert response['tag'] == os.getenv('CORTEX_ENV') + "-" + os.getenv('CORTEX_TENANT') + "-gitops-catalog" diff --git a/tests/test_gitops_logs.py b/tests/test_gitops_logs.py new file mode 100644 index 0000000..f2f53fc --- /dev/null +++ b/tests/test_gitops_logs.py @@ -0,0 +1,13 @@ +from tests.helpers.utils import * + +def test_gitops_logs_get(): + cli(["gitops-logs", "get"]) + +def test_gitops_logs_page_size(capsys): + response = cli(["gitops-logs", "get", "-p", "0", "-z", "1"]) + # Only run assert if there is at least one entry in the gitops 
logs + if response['totalPages'] > 0: + assert len(response['logs']) == 1, "Changing page size should return requested amount of entries" + else: + print("No gitops logs. Not running assertion test.") + diff --git a/tests/test_groups.py b/tests/test_groups.py index 3021f74..cae49c2 100644 --- a/tests/test_groups.py +++ b/tests/test_groups.py @@ -1,14 +1,10 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli_command(capsys, ["catalog", "create", "-f", "data/run-time/groups-entity.yaml"]) +def test_groups(): + cli(["groups", "add", "-t", "cli-test-service", "-g", "test-group-2,test-group-3"]) + response = cli(["groups", "get", "-t", "cli-test-service"]) + assert any(group['tag'] == 'test-group-2' for group in response['groups']), "Should find group named test-group-2 in entity cli-test-service" - cli_command(capsys, ["groups", "add", "-t", "groups-entity", "-f", "data/run-time/groups.json"]) - - response = cli_command(capsys, ["groups", "get", "-t", "groups-entity"]) - assert any(group['tag'] == "group1" for group in response['groups']), "Entity should have 'group1' as a group" - - cli(["-q", "groups", "delete", "-t", "groups-entity", "-f", "data/run-time/groups.json"]) - - response = cli_command(capsys, ["groups", "get", "-t", "groups-entity"]) - assert not any(group['tag'] == "group1" for group in response['groups']), "Entity should NOT have 'group1' as a group" + cli(["groups", "delete", "-t", "cli-test-service", "-g", "test-group-2,test-group-3"]) + response = cli(["groups", "get", "-t", "cli-test-service"]) + assert not(any(group['tag'] == 'test-group-2' for group in response['groups'])), "After delete, should not find group named test-group-2 in entity cli-test-service" diff --git a/tests/test_groups_input_file.py b/tests/test_groups_input_file.py new file mode 100644 index 0000000..687be38 --- /dev/null +++ b/tests/test_groups_input_file.py @@ -0,0 +1,15 @@ +from tests.helpers.utils import * + +def test(): + cli(["groups", 
"add", "-t", "cli-test-service", "-f", "tests/test-groups.json"]) + + cli(["groups", "add", "-t", "cli-test-service", "-f", "tests/test-groups.json"]) + response = cli(["groups", "get", "-t", "cli-test-service"]) + assert any(group['tag'] == 'group1' for group in response['groups']), "should find group1 in list of groups" + assert any(group['tag'] == 'group2' for group in response['groups']), "should find group2 in list of groups" + + cli(["groups", "delete", "-t", "cli-test-service", "-f", "tests/test-groups.json"]) + response = cli(["groups", "get", "-t", "cli-test-service"]) + + assert not(any(group['tag'] == 'group1' for group in response['groups'])), "should not find group1 in list of groups" + assert not(any(group['tag'] == 'group2' for group in response['groups'])), "should not find group2 in list of groups" diff --git a/tests/test_import.py b/tests/test_import.py new file mode 100644 index 0000000..1e47836 --- /dev/null +++ b/tests/test_import.py @@ -0,0 +1,6 @@ +from tests.helpers.utils import * + +@pytest.mark.setup +def test(): + response = cli(["backup", "import", "-d", "data/import"], return_type=ReturnType.STDOUT) + print(response) diff --git a/tests.orig/test_integrations_aws.py b/tests/test_integrations_aws.py similarity index 62% rename from tests.orig/test_integrations_aws.py rename to tests/test_integrations_aws.py index d64d717..86cd340 100644 --- a/tests.orig/test_integrations_aws.py +++ b/tests/test_integrations_aws.py @@ -1,9 +1,4 @@ -""" -Tests for aws integration commands. -""" -from cortexapps_cli.cortex import cli -import os -import responses +from tests.helpers.utils import * # Since responses are all mocked and no data validation is done by the CLI -- # we let the API handle validation -- we don't need valid input files. 
@@ -13,16 +8,19 @@ def _dummy_file(tmp_path): return f @responses.activate -def test_integrations_aws_add(tmp_path): +def test_integrations_aws_add(): + """ + Test adding a single configuration + """ responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", json=[{'accountId': 123, 'role:': 'test'}], status=200) - cli(["integrations", "aws", "add", "-a", "123", "-r", "test"]) + response = cli(["integrations", "aws", "add", "-a", "123", "-r", "test"]) + assert response[0]['accountId'] == 123, "accountId should be returned in response" @responses.activate def test_integrations_aws_delete(): responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/123456", status=200) cli(["integrations", "aws", "delete", "-a", "123456"]) - @responses.activate def test_integrations_aws_delete_all(): responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) @@ -34,15 +32,15 @@ def test_integrations_aws_get(): cli(["integrations", "aws", "get", "-a", "123456"]) @responses.activate -def test_integrations_aws_get_all(): +def test_integrations_aws_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) - cli(["integrations", "aws", "get-all"]) + cli(["integrations", "aws", "list"]) @responses.activate -def test_integrations_aws_update(tmp_path): - f = _dummy_file(tmp_path) +def test_integrations_aws_update(): + #f = _dummy_file(tmp_path) responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) - cli(["integrations", "aws", "update", "-f", str(f)]) + cli(["integrations", "aws", "update", "-c", "123=test", "-c", "456=test1"]) @responses.activate def test_integrations_aws_validate(): @@ -53,3 +51,13 @@ def test_integrations_aws_validate(): def test_integrations_aws_validate_all(): responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/all/validate", 
status=200) cli(["integrations", "aws", "validate-all"]) + +@responses.activate +def test_integrations_aws_list_types(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/types", json=[{'total': 1, 'types': [{'type': 'AWS::EC2::Instance'}]}], status=200) + cli(["integrations", "aws", "list-types"]) + +@responses.activate +def test_integrations_aws_update_types(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/types", json=[{'types': [{'type': 'AWS::EC2::Instance'}]}], status=200) + cli(["integrations", "aws", "update-types", "-t", "AWS::EC2::Instance=true", "-t", "AWS::ECS::VPC=false"], ReturnType.RAW) diff --git a/tests/test_integrations_azure_devops.py b/tests/test_integrations_azure_devops.py new file mode 100644 index 0000000..484b49c --- /dev/null +++ b/tests/test_integrations_azure_devops.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_azure_devops_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_azure_devops_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration", json=[{'accountId': 123, 'role:': 'test'}], status=200) + cli(["integrations", "azure-devops", "add", "-a", "myAlias", "-h", "my.host.com", "-o", "my-slug", "-p", "123456", "-u", "steph.curry"]) + +@responses.activate +def test_integrations_azure_devops_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configurations", json={}, status=200) + cli(["integrations", "azure-devops", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_azure_devops_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/test", status=200) + cli(["integrations", "azure-devops", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_azure_devops_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configurations", status=200) + cli(["integrations", "azure-devops", "delete-all"]) + +@responses.activate +def test_integrations_azure_devops_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/test", json={}, status=200) + cli(["integrations", "azure-devops", "get", "-a", "test"]) + +@responses.activate +def test_integrations_azure_devops_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configurations", json={}, status=200) + cli(["integrations", "azure-devops", "list"]) + +@responses.activate +def test_integrations_azure_devops_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/default-configuration", json={}, status=200) + 
cli(["integrations", "azure-devops", "get-default"]) + +@responses.activate +def test_integrations_azure_devops_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/test", json={}, status=200) + cli(["integrations", "azure-devops", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_azure_devops_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/validate/test", json={}, status=200) + cli(["integrations", "azure-devops", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_azure_devops_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/validate", json={}, status=200) + cli(["integrations", "azure-devops", "validate-all"]) diff --git a/tests.orig/test_integrations_azure_resources.py b/tests/test_integrations_azure_resources.py similarity index 71% rename from tests.orig/test_integrations_azure_resources.py rename to tests/test_integrations_azure_resources.py index 49e9963..49f2576 100644 --- a/tests.orig/test_integrations_azure_resources.py +++ b/tests/test_integrations_azure_resources.py @@ -1,11 +1,4 @@ -""" -Tests for azure-resources integration commands. - -These tests all use mock responses. -""" -from cortexapps_cli.cortex import cli -import os -import responses +from tests.helpers.utils import * # Since responses are all mocked and no data validation is done by the CLI -- # we let the API handle validation -- we don't need valid input files. 
@@ -15,10 +8,9 @@ def _dummy_file(tmp_path): return f @responses.activate -def test_integrations_azure_resources_add(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration", json={}, status=200) - cli(["integrations", "azure-resources", "add", "-f", str(f)]) +def test_integrations_azure_resources_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration", json=[{'accountId': 123, 'role:': 'test'}], status=200) + cli(["integrations", "azure-resources", "add", "-a", "myAlias", "-h", "my.host.com", "-o", "my-slug", "-p", "123456", "-u", "steph.curry"]) @responses.activate def test_integrations_azure_resources_add_multiple(tmp_path): @@ -42,9 +34,9 @@ def test_integrations_azure_resources_get(): cli(["integrations", "azure-resources", "get", "-a", "test"]) @responses.activate -def test_integrations_azure_resources_get_all(): +def test_integrations_azure_resources_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configurations", json={}, status=200) - cli(["integrations", "azure-resources", "get-all"]) + cli(["integrations", "azure-resources", "list"]) @responses.activate def test_integrations_azure_resources_get_default(): @@ -52,10 +44,9 @@ def test_integrations_azure_resources_get_default(): cli(["integrations", "azure-resources", "get-default"]) @responses.activate -def test_integrations_azure_resources_update(tmp_path): - f = _dummy_file(tmp_path) +def test_integrations_azure_resources_update(): responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/test", json={}, status=200) - cli(["integrations", "azure-resources", "update", "-a", "test", "-f", str(f)]) + cli(["integrations", "azure-resources", "update", "-a", "test", "-i"]) @responses.activate def test_integrations_azure_resources_validate(): @@ -66,3 +57,13 @@ def 
test_integrations_azure_resources_validate(): def test_integrations_azure_resources_validate_all(): responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/validate", json={}, status=200) cli(["integrations", "azure-resources", "validate-all"]) + +@responses.activate +def test_integrations_azure_resources_list_types(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/types", json={}, status=200) + cli(["integrations", "azure-resources", "list-types"]) + +@responses.activate +def test_integrations_azure_resources_update_types(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/types", json={}, status=200) + cli(["integrations", "azure-resources", "update-types", "-t", "microsoft.insights/workbooks=true", "-t", "microsoft.resources/subscriptions=false"], ReturnType.RAW) diff --git a/tests/test_integrations_circleci.py b/tests/test_integrations_circleci.py new file mode 100644 index 0000000..e738404 --- /dev/null +++ b/tests/test_integrations_circleci.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_circle_ci_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_circle_ci_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration", json={}, status=200) + cli(["integrations", "circleci", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_circle_ci_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configurations", json={}, status=200) + cli(["integrations", "circleci", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_circle_ci_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/test", status=200) + cli(["integrations", "circleci", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_circle_ci_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configurations", status=200) + cli(["integrations", "circleci", "delete-all"]) + +@responses.activate +def test_integrations_circle_ci_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/test", json={}, status=200) + cli(["integrations", "circleci", "get", "-a", "test"]) + +@responses.activate +def test_integrations_circle_ci_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configurations", json={}, status=200) + cli(["integrations", "circleci", "list"]) + +@responses.activate +def test_integrations_circle_ci_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/default-configuration", json={}, status=200) + cli(["integrations", "circleci", "get-default"]) + +@responses.activate +def test_integrations_circle_ci_update(): + responses.add(responses.PUT, 
os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/test", json={}, status=200) + cli(["integrations", "circleci", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_circle_ci_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/validate/test", json={}, status=200) + cli(["integrations", "circleci", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_circle_ci_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/validate", json={}, status=200) + cli(["integrations", "circleci", "validate-all"]) diff --git a/tests/test_integrations_coralogix.py b/tests/test_integrations_coralogix.py new file mode 100644 index 0000000..52bcd53 --- /dev/null +++ b/tests/test_integrations_coralogix.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_coralogix_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_coralogix_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration", json={}, status=200) + cli(["integrations", "coralogix", "add", "-a", "myAlias", "-r", "US1", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_coralogix_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configurations", json={}, status=200) + cli(["integrations", "coralogix", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_coralogix_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/test", status=200) + cli(["integrations", "coralogix", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_coralogix_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configurations", status=200) + cli(["integrations", "coralogix", "delete-all"]) + +@responses.activate +def test_integrations_coralogix_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/test", json={}, status=200) + cli(["integrations", "coralogix", "get", "-a", "test"]) + +@responses.activate +def test_integrations_coralogix_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configurations", json={}, status=200) + cli(["integrations", "coralogix", "list"]) + +@responses.activate +def test_integrations_coralogix_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/default-configuration", json={}, status=200) + cli(["integrations", "coralogix", "get-default"]) + +@responses.activate +def test_integrations_coralogix_update(): + responses.add(responses.PUT, 
os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/test", json={}, status=200) + cli(["integrations", "coralogix", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_coralogix_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/validate/test", json={}, status=200) + cli(["integrations", "coralogix", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_coralogix_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/validate", json={}, status=200) + cli(["integrations", "coralogix", "validate-all"]) diff --git a/tests/test_integrations_datadog.py b/tests/test_integrations_datadog.py new file mode 100644 index 0000000..bae6289 --- /dev/null +++ b/tests/test_integrations_datadog.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_datadog_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_datadog_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration", json={}, status=200) + cli(["integrations", "datadog", "add", "-a", "myAlias", "-r", "US1", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_datadog_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configurations", json={}, status=200) + cli(["integrations", "datadog", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_datadog_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration/test", status=200) + cli(["integrations", "datadog", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_datadog_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configurations", status=200) + cli(["integrations", "datadog", "delete-all"]) + +@responses.activate +def test_integrations_datadog_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration/test", json={}, status=200) + cli(["integrations", "datadog", "get", "-a", "test"]) + +@responses.activate +def test_integrations_datadog_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configurations", json={}, status=200) + cli(["integrations", "datadog", "list"]) + +@responses.activate +def test_integrations_datadog_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/default-configuration", json={}, status=200) + cli(["integrations", "datadog", "get-default"]) + +@responses.activate +def test_integrations_datadog_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + 
"/api/v1/datadog/configuration/test", json={}, status=200) + cli(["integrations", "datadog", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_datadog_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration/validate/test", json={}, status=200) + cli(["integrations", "datadog", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_datadog_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration/validate", json={}, status=200) + cli(["integrations", "datadog", "validate-all"]) diff --git a/tests/test_integrations_github.py b/tests/test_integrations_github.py new file mode 100644 index 0000000..1490b9e --- /dev/null +++ b/tests/test_integrations_github.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_github_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_github_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration", json={}, status=200) + cli(["integrations", "github", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_github_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", json={}, status=200) + cli(["integrations", "github", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_github_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration/test", status=200) + cli(["integrations", "github", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_github_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", status=200) + cli(["integrations", "github", "delete-all"]) + +@responses.activate +def test_integrations_github_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration/test", json={}, status=200) + cli(["integrations", "github", "get", "-a", "test"]) + +@responses.activate +def test_integrations_github_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", json={}, status=200) + cli(["integrations", "github", "list"]) + +@responses.activate +def test_integrations_github_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/default-configuration", json={}, status=200) + cli(["integrations", "github", "get-default"]) + +@responses.activate +def test_integrations_github_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + 
"/api/v1/github/configuration/test", json={}, status=200) + cli(["integrations", "github", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_github_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration/validate/test", json={}, status=200) + cli(["integrations", "github", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_github_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration/validate", json={}, status=200) + cli(["integrations", "github", "validate-all"]) diff --git a/tests/test_integrations_gitlab.py b/tests/test_integrations_gitlab.py new file mode 100644 index 0000000..8ff6e01 --- /dev/null +++ b/tests/test_integrations_gitlab.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_gitlab_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_gitlab_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration", json={}, status=200) + cli(["integrations", "gitlab", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_gitlab_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configurations", json={}, status=200) + cli(["integrations", "gitlab", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_gitlab_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration/test", status=200) + cli(["integrations", "gitlab", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_gitlab_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configurations", status=200) + cli(["integrations", "gitlab", "delete-all"]) + +@responses.activate +def test_integrations_gitlab_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration/test", json={}, status=200) + cli(["integrations", "gitlab", "get", "-a", "test"]) + +@responses.activate +def test_integrations_gitlab_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configurations", json={}, status=200) + cli(["integrations", "gitlab", "list"]) + +@responses.activate +def test_integrations_gitlab_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/default-configuration", json={}, status=200) + cli(["integrations", "gitlab", "get-default"]) + +@responses.activate +def test_integrations_gitlab_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + 
"/api/v1/gitlab/configuration/test", json={}, status=200) + cli(["integrations", "gitlab", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_gitlab_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration/validate/test", json={}, status=200) + cli(["integrations", "gitlab", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_gitlab_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration/validate", json={}, status=200) + cli(["integrations", "gitlab", "validate-all"]) diff --git a/tests.orig/test_integrations_incidentio.py b/tests/test_integrations_incidentio.py similarity index 54% rename from tests.orig/test_integrations_incidentio.py rename to tests/test_integrations_incidentio.py index 7ce9d79..325c26d 100644 --- a/tests.orig/test_integrations_incidentio.py +++ b/tests/test_integrations_incidentio.py @@ -1,12 +1,4 @@ -""" -Tests for incident.io integration commands. - -These tests all use mock responses. -""" -from cortexapps_cli.cortex import cli -import json -import os -import responses +from tests.helpers.utils import * # Since responses are all mocked and no data validation is done by the CLI -- # we let the API handle validation -- we don't need valid input files. 
@@ -16,15 +8,14 @@ def _dummy_file(tmp_path): return f @responses.activate -def test_integrations_incidentio_add(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "incidentio", "add", "-f", str(f)]) +def test_integrations_incidentio_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration", json={}, status=200) + cli(["integrations", "incidentio", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) @responses.activate def test_integrations_incidentio_add_multiple(tmp_path): f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", json={}, status=200) cli(["integrations", "incidentio", "add-multiple", "-f", str(f)]) @responses.activate @@ -39,31 +30,30 @@ def test_integrations_incidentio_delete_all(): @responses.activate def test_integrations_incidentio_get(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", json={}, status=200) cli(["integrations", "incidentio", "get", "-a", "test"]) @responses.activate -def test_integrations_incidentio_get_all(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", json=[{'accountId': 123, 'alias:': 'test', 
'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "incidentio", "get-all"]) +def test_integrations_incidentio_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", json={}, status=200) + cli(["integrations", "incidentio", "list"]) @responses.activate def test_integrations_incidentio_get_default(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/default-configuration", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/default-configuration", json={}, status=200) cli(["integrations", "incidentio", "get-default"]) @responses.activate -def test_integrations_incidentio_update(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", json=[{'alias:': 'test', 'isDefault': json.dumps("true")}], status=200) - cli(["integrations", "incidentio", "update", "-a", "test", "-f", str(f)]) +def test_integrations_incidentio_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", json={}, status=200) + cli(["integrations", "incidentio", "update", "-a", "test", "-i"]) @responses.activate def test_integrations_incidentio_validate(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/validate/test", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/validate/test", json={}, status=200) cli(["integrations", "incidentio", "validate", "-a", "test"]) @responses.activate def test_integrations_incidentio_validate_all(): - responses.add(responses.POST, 
os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/validate", json=[ { 'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}], status=200) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/validate", json={}, status=200) cli(["integrations", "incidentio", "validate-all"]) diff --git a/tests/test_integrations_launchdarkly.py b/tests/test_integrations_launchdarkly.py new file mode 100644 index 0000000..5657d87 --- /dev/null +++ b/tests/test_integrations_launchdarkly.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. +def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_launchdarkly_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_launchdarkly_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration", json={}, status=200) + cli(["integrations", "launchdarkly", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_launchdarkly_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configurations", json={}, status=200) + cli(["integrations", "launchdarkly", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_launchdarkly_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/test", status=200) + cli(["integrations", "launchdarkly", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_launchdarkly_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configurations", status=200) + cli(["integrations", "launchdarkly", "delete-all"]) + 
+@responses.activate +def test_integrations_launchdarkly_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/test", json={}, status=200) + cli(["integrations", "launchdarkly", "get", "-a", "test"]) + +@responses.activate +def test_integrations_launchdarkly_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configurations", json={}, status=200) + cli(["integrations", "launchdarkly", "list"]) + +@responses.activate +def test_integrations_launchdarkly_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/default-configuration", json={}, status=200) + cli(["integrations", "launchdarkly", "get-default"]) + +@responses.activate +def test_integrations_launchdarkly_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/test", json={}, status=200) + cli(["integrations", "launchdarkly", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_launchdarkly_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/validate/test", json={}, status=200) + cli(["integrations", "launchdarkly", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_launchdarkly_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/validate", json={}, status=200) + cli(["integrations", "launchdarkly", "validate-all"]) diff --git a/tests.orig/test_integrations_newrelic.py b/tests/test_integrations_newrelic.py similarity index 52% rename from tests.orig/test_integrations_newrelic.py rename to tests/test_integrations_newrelic.py index 4f62368..b54e205 100644 --- a/tests.orig/test_integrations_newrelic.py +++ b/tests/test_integrations_newrelic.py @@ -1,12 +1,4 @@ -""" -Tests for newrelic integration commands. - -These tests all use mock responses. 
-""" -from cortexapps_cli.cortex import cli -import json -import responses -import os +from tests.helpers.utils import * # Since responses are all mocked and no data validation is done by the CLI -- # we let the API handle validation -- we don't need valid input files. @@ -16,15 +8,14 @@ def _dummy_file(tmp_path): return f @responses.activate -def test_integrations_newrelic_add(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "newrelic", "add", "-f", str(f)]) +def test_integrations_newrelic_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration", json={}, status=200) + cli(["integrations", "newrelic", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) @responses.activate def test_integrations_newrelic_add_multiple(tmp_path): f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", json={}, status=200) cli(["integrations", "newrelic", "add-multiple", "-f", str(f)]) @responses.activate @@ -39,31 +30,30 @@ def test_integrations_newrelic_delete_all(): @responses.activate def test_integrations_newrelic_get(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", json={}, status=200) cli(["integrations", 
"newrelic", "get", "-a", "test"]) @responses.activate -def test_integrations_newrelic_get_all(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "newrelic", "get-all"]) +def test_integrations_newrelic_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", json={}, status=200) + cli(["integrations", "newrelic", "list"]) @responses.activate def test_integrations_newrelic_get_default(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/default-configuration", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/default-configuration", json={}, status=200) cli(["integrations", "newrelic", "get-default"]) @responses.activate -def test_integrations_newrelic_update(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", json=[{'alias:': 'test', 'isDefault': json.dumps("true")}], status=200) - cli(["integrations", "newrelic", "update", "-a", "test", "-f", str(f)]) +def test_integrations_newrelic_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", json={}, status=200) + cli(["integrations", "newrelic", "update", "-a", "test", "-i"]) @responses.activate def test_integrations_newrelic_validate(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/validate/test", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/validate/test", 
json={}, status=200) cli(["integrations", "newrelic", "validate", "-a", "test"]) @responses.activate def test_integrations_newrelic_validate_all(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/validate", json=[ { 'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}], status=200) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/validate", json={}, status=200) cli(["integrations", "newrelic", "validate-all"]) diff --git a/tests/test_integrations_pagerduty.py b/tests/test_integrations_pagerduty.py new file mode 100644 index 0000000..7c5271d --- /dev/null +++ b/tests/test_integrations_pagerduty.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. +def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_pagerduty_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_pagerduty_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration", json={}, status=200) + cli(["integrations", "pagerduty", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_pagerduty_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configurations", json={}, status=200) + cli(["integrations", "pagerduty", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_pagerduty_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/test", status=200) + cli(["integrations", "pagerduty", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_pagerduty_delete_all(): + responses.add(responses.DELETE,
os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configurations", status=200) + cli(["integrations", "pagerduty", "delete-all"]) + +@responses.activate +def test_integrations_pagerduty_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/test", json={}, status=200) + cli(["integrations", "pagerduty", "get", "-a", "test"]) + +@responses.activate +def test_integrations_pagerduty_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configurations", json={}, status=200) + cli(["integrations", "pagerduty", "list"]) + +@responses.activate +def test_integrations_pagerduty_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/default-configuration", json={}, status=200) + cli(["integrations", "pagerduty", "get-default"]) + +@responses.activate +def test_integrations_pagerduty_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/test", json={}, status=200) + cli(["integrations", "pagerduty", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_pagerduty_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/validate/test", json={}, status=200) + cli(["integrations", "pagerduty", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_pagerduty_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/validate", json={}, status=200) + cli(["integrations", "pagerduty", "validate-all"]) diff --git a/tests/test_integrations_prometheus.py b/tests/test_integrations_prometheus.py new file mode 100644 index 0000000..83ff80c --- /dev/null +++ b/tests/test_integrations_prometheus.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't
need valid input files. +def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_prometheus_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_prometheus_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration", json={}, status=200) + cli(["integrations", "prometheus", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_prometheus_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configurations", json={}, status=200) + cli(["integrations", "prometheus", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_prometheus_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/test", status=200) + cli(["integrations", "prometheus", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_prometheus_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configurations", status=200) + cli(["integrations", "prometheus", "delete-all"]) + +@responses.activate +def test_integrations_prometheus_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/test", json={}, status=200) + cli(["integrations", "prometheus", "get", "-a", "test"]) + +@responses.activate +def test_integrations_prometheus_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configurations", json={}, status=200) + cli(["integrations", "prometheus", "list"]) + +@responses.activate +def test_integrations_prometheus_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/default-configuration", json={}, status=200) + cli(["integrations", "prometheus", "get-default"]) + +@responses.activate +def 
test_integrations_prometheus_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/test", json={}, status=200) + cli(["integrations", "prometheus", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_prometheus_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/validate/test", json={}, status=200) + cli(["integrations", "prometheus", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_prometheus_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/validate", json={}, status=200) + cli(["integrations", "prometheus", "validate-all"]) diff --git a/tests/test_integrations_sonarqube.py b/tests/test_integrations_sonarqube.py new file mode 100644 index 0000000..9195582 --- /dev/null +++ b/tests/test_integrations_sonarqube.py @@ -0,0 +1,80 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_sonarqube_add.json" + + content = """ + { + "configurations": [ + { + "alias": "multiple-1", + "host": "cortex.io", + "isDefault": false, + "token": "string" + }, + { + "alias": "multiple-2", + "host": "cortex.io", + "isDefault": false, + "token": "string" + } + ] + } + """ + + f.write_text(content) + + return f + +@responses.activate +def test_integrations_sonarqube_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration", json={}, status=200) + cli(["integrations", "sonarqube", "add", "-a", "myAlias", "-h", "cortex.io", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_sonarqube_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configurations", json={}, status=200) + cli(["integrations", "sonarqube", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_sonarqube_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/test", status=200) + cli(["integrations", "sonarqube", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_sonarqube_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configurations", status=200) + cli(["integrations", "sonarqube", "delete-all"]) + +@responses.activate +def test_integrations_sonarqube_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/test", json={}, status=200) + cli(["integrations", "sonarqube", "get", "-a", "test"]) + +@responses.activate +def test_integrations_sonarqube_list(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configurations", json={}, status=200) + cli(["integrations", "sonarqube", "list"]) + +@responses.activate +def test_integrations_sonarqube_get_default(): + 
responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/default-configuration", json={}, status=200) + cli(["integrations", "sonarqube", "get-default"]) + +@responses.activate +def test_integrations_sonarqube_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/test", json={}, status=200) + cli(["integrations", "sonarqube", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_sonarqube_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/validate/test", json={}, status=200) + cli(["integrations", "sonarqube", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_sonarqube_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/validate", json={}, status=200) + cli(["integrations", "sonarqube", "validate-all"]) diff --git a/tests/test_ip_allowlist.py b/tests/test_ip_allowlist.py index 039616c..7004cca 100644 --- a/tests/test_ip_allowlist.py +++ b/tests/test_ip_allowlist.py @@ -1,49 +1,15 @@ -from common import * +from tests.helpers.utils import * +import requests -def _ip_allowlist_input(tmp_path, ip_address, description): - f = tmp_path / "test_ip_allowlist_input.json" - template = Template(""" - { - "entries": [ - { - "address": "${ip_address}", - "description": "string" - }, - { - "address": "127.0.0.1", - "description": "${description}" - } - ] - } - """) - content = template.substitute(ip_address=ip_address, description=description) - f.write_text(content) - return f +def test(capsys, tmp_path): + ip_address = "0.0.0.0/0" + ip_param = ip_address + ":My current IP" + cli(["ip-allowlist", "validate", "-a", ip_param]) + cli(["ip-allowlist", "replace", "-a", ip_param]) -def test(tmp_path, capsys): - ip_address = requests.get("https://ip.me").text.strip() + response = cli(["ip-allowlist", "get"]) + assert response['entries'][0]['address'] == 
ip_address, "Should have a single IP address in allowlist" - description = "initial description" - f = _ip_allowlist_input(tmp_path, ip_address, description) - response = cli_command(capsys, ["ip-allowlist", "validate", "-f", str(f)]) - - # Initial replace - cli_command(capsys, ["ip-allowlist", "replace", "-f", str(f)]) - response = cli_command(capsys, ["ip-allowlist", "get"]) - assert any(entry['description'] == description for entry in response['entries']), "Allowlist entry should have expected description" - - # Updated replace - updated_description = "updated description" - f = _ip_allowlist_input(tmp_path, ip_address, updated_description) - cli_command(capsys, ["ip-allowlist", "replace", "-f", str(f)]) - response = cli_command(capsys, ["ip-allowlist", "get"]) - assert any(entry['description'] == updated_description for entry in response['entries']), "Allowlist entry should be updated" - - cli_command(capsys, ["ip-allowlist", "replace", "-f", "data/run-time/ip_allowlist_empty.json"]) - - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "catalog", "ip-allowlist", "-f", "data/run-time/ip_allowlist_invalid.json"]) - out, err = capsys.readouterr() - - assert out == "Not Found" - assert excinfo.value.code == 404 + cli(["ip-allowlist", "remove-all"]) + response = cli(["ip-allowlist", "get"]) + assert len(response['entries']) == 0, "Should not have any entries in allowlist" diff --git a/tests/test_packages.py b/tests/test_packages.py index adbc8d4..876f28e 100644 --- a/tests/test_packages.py +++ b/tests/test_packages.py @@ -1,28 +1,68 @@ -from common import * +from tests.helpers.utils import * -# When trying to put python and node tests in separate tests running in parallel, got 409 Conflict HTTP errors, -# even when using different entity tag. 
-def test(capsys): - cli_command(capsys, ["packages", "go", "upload", "-t", "sso-integration", "-f", "data/run-time/packages_go.sum"]) - packages(capsys, "go", "GO", "3.3.0", "github.com/gofrs/uuid", "sso-integration") +def test_packages(): + cli(["packages", "go", "upload", "-t", "cli-test-service", "-f", "tests/test_packages_go.sum"]) - cli_command(capsys, ["packages", "python", "upload-pipfile", "-t", "sso-integration", "-f", "data/run-time/packages_python_pipfile.lock"]) - packages(capsys, "python", "PYTHON", "2022.12.7", "certifi", "sso-integration") + cli(["packages", "java", "upload-single", "-t", "cli-test-service", "-f", "tests/test_packages_java_single.json"]) - cli_command(capsys, ["packages", "python", "upload-requirements", "-t", "sso-integration", "-f", "data/run-time/packages_python_requirements.txt"]) - packages(capsys, "python", "PYTHON", "1.0.6", "contourpy", "sso-integration") + cli(["packages", "java", "upload-multiple", "-t", "cli-test-service", "-f", "tests/test_packages_java_multiple.json"]) - cli_command(capsys, ["packages", "node", "upload-package", "-t", "sso-integration", "-f", "data/run-time/packages_node_package.json"]) - packages(capsys, "node", "NODE", "^4.1.11", "clean-css", "sso-integration") + # upload-pipfile will replace any existing PYTHON package entries for an entity. It's assumed you will use either + # pipfile.lock or requirements.txt, but not both. + # So we need to test here because these packages will be overwritten by the upload-requirements command. 
+ cli(["packages", "python", "upload-pipfile", "-t", "cli-test-service", "-f", "tests/test_packages_python_pipfile.lock"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) + assert any(package['name'] == 'certifi' and package['packageType'] == "PYTHON" for package in response), "Should find Python pipfile package" - cli_command(capsys, ["packages", "node", "upload-package-lock", "-t", "sso-integration", "-f", "data/run-time/packages_node_package_lock.json"]) - packages(capsys, "node", "NODE", "4.2.6", "@angular/common", "sso-integration") + cli(["packages", "python", "upload-requirements", "-t", "cli-test-service", "-f", "tests/test_packages_python_requirements.txt"]) - cli_command(capsys, ["packages", "node", "upload-yarn-lock", "-t", "sso-integration", "-f", "data/run-time/packages_node_yarn.lock"]) - packages(capsys, "node", "NODE", "6.16.5", "@types/babylon", "sso-integration") + # Similar store for Node as Python. Only one file type is supported. + cli(["packages", "node", "upload-package-json", "-t", "cli-test-service", "-f", "tests/test_packages_node_package.json"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) + assert any(package['name'] == 'clean-css' and package['packageType'] == "NODE" for package in response), "Should find Node package.json package" - cli_command(capsys, ["packages", "nuget", "upload-packages-lock", "-t", "sso-integration", "-f", "data/run-time/packages_nuget_packages_lock.json"]) - packages(capsys, "nuget", "NUGET", "1.0.0", "Microsoft.NETFramework.ReferenceAssemblies", "sso-integration") + cli(["packages", "node", "upload-package-lock", "-t", "cli-test-service", "-f", "tests/test_packages_node_package_lock.json"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) + assert any(package['name'] == '@angular/common' and package['packageType'] == "NODE" for package in response), "Should find Node package.lock package" - cli_command(capsys, ["packages", "nuget", "upload-csproj", "-t", 
"sso-integration", "-f", "data/run-time/packages_nuget.csproj"]) - packages(capsys, "nuget", "NUGET", "7.1.1", "CsvHelper", "sso-integration") + cli(["packages", "node", "upload-yarn-lock", "-t", "cli-test-service", "-f", "tests/test_packages_node_yarn.lock"]) + + cli(["packages", "nuget", "upload-packages-lock", "-t", "cli-test-service", "-f", "tests/test_packages_nuget_packages_lock.json"]) + + cli(["packages", "nuget", "upload-csproj", "-t", "cli-test-service", "-f", "tests/test_packages_nuget.csproj"]) + + response = cli(["packages", "list", "-t", "cli-test-service"]) + assert any(package['name'] == 'github.com/cortex.io/catalog' and package['packageType'] == "GO" for package in response), "Should find GO package" + assert any(package['name'] == 'io.cortex.scorecards' and package['packageType'] == "JAVA" for package in response), "Should find single-updated Java package" + assert any(package['name'] == 'io.cortex.teams' and package['packageType'] == "JAVA" for package in response), "Should find multiple-update Java package" + assert any(package['name'] == 'cycler' and package['packageType'] == "PYTHON" for package in response), "Should find Python requirement.txt package" + assert any(package['name'] == '@types/babylon' and package['packageType'] == "NODE" for package in response), "Should find Node yarn.lock package" + assert any(package['name'] == 'MicroBuild.Core' and package['packageType'] == "NUGET" for package in response), "Should find NuGet package" + + cli(["packages", "go", "delete", "-t", "cli-test-service", "-n", "github.com/cortex.io/catalog"]) + + cli(["packages", "java", "delete", "-t", "cli-test-service", "-n", "io.cortex.scorecards"]) + cli(["packages", "java", "delete", "-t", "cli-test-service", "-n", "io.cortex.teams"]) + + cli(["packages", "python", "delete", "-t", "cli-test-service", "-n", "cycler"]) + + cli(["packages", "node", "delete", "-t", "cli-test-service", "-n", "@types/babylon"]) + + cli(["packages", "nuget", "delete", "-t", 
"cli-test-service", "-n", "MicroBuild.Core"]) + + response = cli(["packages", "list", "-t", "cli-test-service"]) + + assert not any(package['name'] == 'github.com/cortex.io/catalog' and package['packageType'] == "GO" for package in response), "Should not find deleted GO package" + + assert not any(package['name'] == 'io.cortex.scorecards' and package['packageType'] == "JAVA" for package in response), "Should not find deleted single-updated Java package" + assert not any(package['name'] == 'io.cortex.teams' and package['packageType'] == "JAVA" for package in response), "Should not find deleted multiple-update Java package" + + assert not any(package['name'] == 'cycler' and package['packageType'] == "PYTHON" for package in response), "Should not find deleted Python requirement.txt package" + + assert not any(package['name'] == '@types/babylon' and package['packageType'] == "NODE" for package in response), "Should not find deleted Node yarn.lock package" + + assert not any(package['name'] == 'MicroBuild.Core' and package['packageType'] == "NUGET" for package in response), "Should not find deleted NuGet package" + + cli(["packages", "delete-all", "-t", "cli-test-service"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) + assert len(response) == 0, "Should not find any packages after delete-all" diff --git a/tests.orig/test_packages_go.sum b/tests/test_packages_go.sum similarity index 100% rename from tests.orig/test_packages_go.sum rename to tests/test_packages_go.sum diff --git a/tests/test_packages_java.py b/tests/test_packages_java.py deleted file mode 100644 index fb6aa21..0000000 --- a/tests/test_packages_java.py +++ /dev/null @@ -1,6 +0,0 @@ -from common import * - -def test(capsys): - cli_command(capsys, ["packages", "java", "upload-single", "-t", "sso-integration", "-f", "data/run-time/packages_java_single.json"]) - cli_command(capsys, ["packages", "java", "upload-multiple", "-t", "sso-integration", "-f", 
"data/run-time/packages_java_multiple.json"]) - packages(capsys, "java", "JAVA", "3.3.3", "io.cortex.teams", "sso-integration") diff --git a/tests.orig/test_packages_java_multiple.json b/tests/test_packages_java_multiple.json similarity index 100% rename from tests.orig/test_packages_java_multiple.json rename to tests/test_packages_java_multiple.json diff --git a/tests.orig/test_packages_java_single.json b/tests/test_packages_java_single.json similarity index 100% rename from tests.orig/test_packages_java_single.json rename to tests/test_packages_java_single.json diff --git a/tests.orig/test_packages_node_package.json b/tests/test_packages_node_package.json similarity index 100% rename from tests.orig/test_packages_node_package.json rename to tests/test_packages_node_package.json diff --git a/tests.orig/test_packages_node_package_lock.json b/tests/test_packages_node_package_lock.json similarity index 100% rename from tests.orig/test_packages_node_package_lock.json rename to tests/test_packages_node_package_lock.json diff --git a/tests.orig/test_packages_node_yarn.lock b/tests/test_packages_node_yarn.lock similarity index 100% rename from tests.orig/test_packages_node_yarn.lock rename to tests/test_packages_node_yarn.lock diff --git a/tests.orig/test_packages_nuget.csproj b/tests/test_packages_nuget.csproj similarity index 100% rename from tests.orig/test_packages_nuget.csproj rename to tests/test_packages_nuget.csproj diff --git a/tests.orig/test_packages_nuget_packages_lock.json b/tests/test_packages_nuget_packages_lock.json similarity index 100% rename from tests.orig/test_packages_nuget_packages_lock.json rename to tests/test_packages_nuget_packages_lock.json diff --git a/tests.orig/test_packages_python_pipfile.lock b/tests/test_packages_python_pipfile.lock similarity index 100% rename from tests.orig/test_packages_python_pipfile.lock rename to tests/test_packages_python_pipfile.lock diff --git a/tests.orig/test_packages_python_requirements.txt 
b/tests/test_packages_python_requirements.txt similarity index 100% rename from tests.orig/test_packages_python_requirements.txt rename to tests/test_packages_python_requirements.txt diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 3c6a443..c77e6fe 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -1,23 +1,19 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - pluginTag = "public-api-test-plugin" +def test(): + response = cli(["plugins", "list"]) - response = cli_command(capsys, ["plugins", "get"]) - if any(plugin['tag'] == pluginTag for plugin in response['plugins']): - cli(["plugins", "delete", "-t", pluginTag]) + if any(plugin['tag'] == 'cli-test-plugin' for plugin in response['plugins']): + cli(["plugins", "delete", "-t", "cli-test-plugin"]) - cli_command(capsys, ["plugins", "create", "-f", "data/run-time/test_plugins.json"]) + cli(["plugins", "create", "-f", "data/import/plugins/cli-test-plugin.json"]) + response = cli(["plugins", "list"]) + assert any(plugin['tag'] == 'cli-test-plugin' for plugin in response['plugins']), "Plugin named cli-test-plugin should be in list of plugins" - response = cli_command(capsys, ["plugins", "get"]) - assert any(plugin['tag'] == pluginTag for plugin in response['plugins']), "Plugin " + plugin + " returned in get" + cli(["plugins", "replace", "-t", "cli-test-plugin", "-f", "tests/test_plugins_update.json"]) + response = cli(["plugins", "get", "-t", "cli-test-plugin"]) + assert response['tag'] == "cli-test-plugin", "Plugin named cli-test-plugin should be returned by get" - cli_command(capsys, ["plugins", "update", "-t", pluginTag, "-f", "data/run-time//test_plugins_update.json"]) - - response = cli_command(capsys, ["plugins", "get-by-tag", "-t", pluginTag]) - assert response['tag'] == pluginTag, "Plugin " + plugin + " returned by get-by-tag" - assert response['description'] == "Just testing plugin updates", "Plugin " + plugin + " description updated" - - cli(["-q", 
"plugins", "delete", "-t", pluginTag]) - response = cli_command(capsys, ["plugins", "get"]) - assert not any(plugin['tag'] == pluginTag for plugin in response['plugins']), "Plugin " + plugin + " returned in get" + cli(["plugins", "delete", "-t", "cli-test-plugin"]) + response = cli(["plugins", "list"]) + assert not(any(plugin['tag'] == 'cli-test-plugin' for plugin in response['plugins'])), "Plugin named cli-test-plugin should have been deleted" diff --git a/tests/test_plugins_invalid.py b/tests/test_plugins_invalid.py deleted file mode 100644 index 0c6fe55..0000000 --- a/tests/test_plugins_invalid.py +++ /dev/null @@ -1,18 +0,0 @@ -from common import * - -# Using a key with viewer role should be Forbidden. -@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) -def test(capsys): - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "plugins", "create", "-f", "data/run-time/test_plugins_manager.json"]) - out, err = capsys.readouterr() - - assert out == "Forbidden", "Attempt to create plugin as a VIEWER with minimumRole defined as MANAGER should be Forbidden" - assert excinfo.value.code == 403, "VIEWER role cannot create plugin with minimumRole defined as MANAGER" - - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "plugins", "create", "-f", "data/run-time/test_plugins_invalid_role.json"]) - out, err = capsys.readouterr() - - assert out == "Bad Request", "Invalid minimumRole results in Bad Request" - assert excinfo.value.code == 400, "Invalid minimumRole should result in 400 return code" diff --git a/tests/test_plugins_update.json b/tests/test_plugins_update.json new file mode 100644 index 0000000..7f41141 --- /dev/null +++ b/tests/test_plugins_update.json @@ -0,0 +1,18 @@ +{ + "blob": "

This is my updated simple plugin

", + "contexts": [ + { + "type": "GLOBAL" + }, + { + "entityFilter": { + "type": "SERVICE_FILTER" + }, + "type": "ENTITY" + } + ], + "description": "Simple Updated Plugin", + "isDraft": false, + "minimumRoleRequired": "VIEWER", + "name": "My Updated Test Plugin" +} diff --git a/tests/test_queries.py b/tests/test_queries.py new file mode 100644 index 0000000..54fa2a1 --- /dev/null +++ b/tests/test_queries.py @@ -0,0 +1,16 @@ +from tests.helpers.utils import * +from urllib.error import HTTPError + +def test(): + try: + response = cli(["queries", "run", "-f", "tests/test_queries.txt"]) + except HTTPError as e: + status_code = e.response.status_code + if status_code == "409": + print("Query with same CQL is already running") + except: + print("Got an error for which I was not prepared. It's me. Not you.") + else: + job_id = response["jobId"] + response = cli(["queries", "get", "-j", job_id]) + assert response["queryDetails"]['jobId'] == job_id, "Should return query with same jobId returned by queries run" diff --git a/tests.orig/test_queries.txt b/tests/test_queries.txt similarity index 100% rename from tests.orig/test_queries.txt rename to tests/test_queries.txt diff --git a/tests/test_scim.py b/tests/test_scim.py new file mode 100644 index 0000000..3215747 --- /dev/null +++ b/tests/test_scim.py @@ -0,0 +1,34 @@ +from tests.helpers.utils import * +from urllib.error import HTTPError +import pytest + +def test(): + response = cli(["scim", "list"], ReturnType.STDOUT) + + if "HTTP Error 403" in response: + print("SCIM not set up or API key does not have permissions, not running test.") + print("API should probably return something other than a 403 when SCIM isn't set up") + print("because it's not possible to determine if this is a setup or permissions issue.") + else: + response = cli(["scim", "list"]) + total_results = response['totalResults'] + assert total_results >= 0, "Total results should be returned as an integer" + print("total results = " + str(total_results)) + 
+ if total_results > 0: + assert any(user['userName'] == 'jeff.schnitter@proton.me' for user in response['Resources']), "Should find user jeff.schnitter@proton.me" + + response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me"]) + assert response['Resources'][0]['userName'] == 'jeff.schnitter@proton.me', "Should find user jeff.schnitter@proton.me" + id = response['Resources'][0]['id'] + + response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me", "-a", "name.familyName"]) + assert 'familyName' in response['Resources'][0]['name'].keys(), "Should find familyName in response" + + response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me", "-e", "name.familyName"]) + assert 'familyName' not in response['Resources'][0]['name'].keys(), "Should not have familyName in response" + + response = cli(["scim", "get", "--id", id]) + assert response['id'] == id, "Should find matching id based on query" + else: + print("Not running any scim tests, which is lucky because I have not thought of a good way to make these tests generic.") diff --git a/tests/test_scorecards.py b/tests/test_scorecards.py index 2c6bc86..9ae94e3 100644 --- a/tests/test_scorecards.py +++ b/tests/test_scorecards.py @@ -1,30 +1,107 @@ -from common import * +from tests.helpers.utils import * +import yaml -@pytest.mark.skipif(enable_cql_v2() == False, reason="Account flag ENABLE_CQL_V2 is not not set.") -def test_scorecards(capsys): - scorecardTag = "public-api-test-scorecard" - entityTag = "user-profile-metadata-service" +# Get rule id to be used in exemption tests. +# TODO: check for and revoke any PENDING exemptions. 
+@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY']}) +def _get_rule(title): + response = cli(["scorecards", "get", "-s", "cli-test-scorecard"]) + rule_id = [rule['identifier'] for rule in response['scorecard']['rules'] if rule['title'] == title] + return rule_id[0] - response = cli_command(capsys, ["scorecards", "create", "-f", "data/run-time/scorecard.yaml"]) - assert response['scorecard']['tag'] == scorecardTag, "Scorecard with tag public-api-test-scorecard should be created" +def test_scorecards(): + cli(["scorecards", "create", "-f", "data/import/scorecards/cli-test-scorecard.yaml"]) - response = cli_command(capsys, ["scorecards", "list"]) - assert any(scorecard['tag'] == scorecardTag for scorecard in response['scorecards']), scorecard + " should be in list of scorecards" + response = cli(["scorecards", "list"]) + assert any(scorecard['tag'] == 'cli-test-scorecard' for scorecard in response['scorecards']), "Should find scorecard with tag cli-test-scorecard" - response = cli_command(capsys, ["scorecards", "shield", "-s", scorecardTag, "-t", entityTag]) - # Dear future (hopefully smarter) self, feel free to enhance the regex to search for the correct brackets and parentheses in the regular expression. 
- assert re.search(".*Public API Test Scorecard.*https://img.shields.io.*", response['value']), "Value includes scorecard name and shields URL" + response = cli(["scorecards", "shield", "-s", "cli-test-scorecard", "-t", "cli-test-service"]) + assert "img.shields.io" in response['value'], "shields url should be included in string" - response = cli_command(capsys, ["scorecards", "get", "-t", scorecardTag]) - assert response['scorecard']['tag'] == scorecardTag, "Can retrieve tag of scorecard" - assert response['scorecard']['levels'][0]['level']['name'] == 'Gold', "Can retrieve level name defined in scorecard" + response = cli(["scorecards", "get", "-s", "cli-test-scorecard"]) + assert response['scorecard']['tag'] == "cli-test-scorecard", "JSON response should have scorecard tag" - response = cli_command(capsys, ["scorecards", "descriptor", "-t", scorecardTag], "text") - assert yaml.safe_load(response)['tag'] == scorecardTag, "Can get tag from YAML descriptor" - -# cli(["scorecards", "next-steps", "-t", "public-api-test-scorecard", "-e", "user-profile-metadata-service"]) + response = cli(["scorecards", "descriptor", "-s", "cli-test-scorecard"], return_type=ReturnType.STDOUT) + assert "Used to test Cortex CLI" in response, "description of scorecard found in descriptor" + + # cannot rely on a scorecard evaluation being complete, so not performing any validation + cli(["scorecards", "next-steps", "-s", "cli-test-scorecard", "-t", "cli-test-service"]) + # cannot rely on a scorecard evaluation being complete, so not performing any validation + #response = cli(["scorecards", "scores", "-s", "cli-test-scorecard", "-t", "cli-test-service"]) + #assert response['scorecardTag'] == "cli-test-scorecard", "Should get valid response that include cli-test-scorecard" + # # Not sure if we can run this cli right away. Newly-created Scorecard might not be evaluated yet. 
-# cli(["scorecards", "scores", "-t", "public-api-test-scorecard", "-e", "user-profile-metadata-service"]) +# # 2024-05-06, additionally now blocked by CET-8882 +# # cli(["scorecards", "scores", "-t", "cli-test-scorecard", "-e", "cli-test-service"]) +# +# cli(["scorecards", "scores", "-t", "cli-test-scorecard"]) -# cli(["scorecards", "scores", "-t", "public-api-test-scorecard"]) +def test_scorecards_drafts(): + cli(["scorecards", "create", "-f", "data/import/scorecards/cli-test-draft-scorecard.yaml"]) + + response = cli(["scorecards", "list", "-s"]) + assert any(scorecard['tag'] == 'cli-test-draft-scorecard' for scorecard in response['scorecards']) + + cli(["scorecards", "delete", "-s", "cli-test-draft-scorecard"]) + response = cli(["scorecards", "list", "-s"]) + assert not(any(scorecard['tag'] == 'cli-test-draft-scorecard' for scorecard in response['scorecards'])), "should not find deleted scorecard" + +# Challenges with testing exemptions: +# +# - exemptions require scorecards that have evaluated with failing rules; +# testing assumes no tenanted data, so this condition needs to be created as part of the test +# +# - there is no public API to force evaluation of a scorecard; can look into possibility of using +# an internal endpoint for this +# +# - could create a scorecard as part of the test and wait for it to complete, but completion time for +# evaluating a scorecard is non-deterministic and, as experienced with query API tests, completion +# time can be 15 minutes or more, which will increase the time to complete testing by a factor of 5x +# or more +# +# - exemptions requested by an API key with the Cortex ADMIN role are auto-approved, so the exemption must +# be requested with a key that has non-ADMIN privileges +# +# This means there are dependencies on running a test using a VIEWER role to request the exemption and a +# subsequent test using an ADMIN role to act on the exemption +# +# So this is how we'll roll for now . . . 
+# - Automated tests currently run in known tenants that have the 'cli-test-scorecard' in an evaluated state. +# - So we can semi-reliably count on an evaluated scorecard to exist. + +@pytest.fixture(scope='session') +@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) +def test_exemption_that_will_be_approved(): + rule_id = _get_rule("Has Custom Data") + print("rule_id = " + rule_id) + + response = cli(["scorecards", "exemptions", "request", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-r", "test approve", "-ri", rule_id, "-d", "100"]) + assert response['exemptionStatus']['status'] == 'PENDING', "exemption state should be PENDING" + +@pytest.mark.usefixtures('test_exemption_that_will_be_approved') +def test_approve_exemption(): + rule_id = _get_rule("Has Custom Data") + print("rule_id = " + rule_id) + + response = cli(["scorecards", "exemptions", "approve", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-ri", rule_id]) + assert response['exemptions'][0]['exemptionStatus']['status'] == 'APPROVED', "exemption state should be APPROVED" + response = cli(["scorecards", "exemptions", "revoke", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-r", "I revoke you", "-ri", rule_id]) + assert response['exemptions'][0]['exemptionStatus']['status'] == 'REJECTED', "exemption state should be REJECTED" + +@pytest.fixture(scope='session') +@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) +def test_exemption_that_will_be_denied(): + rule_id = _get_rule("Is Definitely False") + print("rule_id = " + rule_id) + + response = cli(["scorecards", "exemptions", "request", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-r", "test deny", "-ri", rule_id, "-d", "100"]) + assert response['exemptionStatus']['status'] == 'PENDING', "exemption state should be PENDING" + +@pytest.mark.usefixtures('test_exemption_that_will_be_denied') +def test_deny_exemption(): + rule_id = _get_rule("Is Definitely 
False") + print("rule_id = " + rule_id) + + response = cli(["scorecards", "exemptions", "deny", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-r", "I deny, therefore I am", "-ri", rule_id]) + assert response['exemptions'][0]['exemptionStatus']['status'] == 'REJECTED', "exemption state should be REJECTED" diff --git a/tests/test_scorecards_drafts.py b/tests/test_scorecards_drafts.py deleted file mode 100644 index ae7c293..0000000 --- a/tests/test_scorecards_drafts.py +++ /dev/null @@ -1,11 +0,0 @@ -from common import * - -@pytest.mark.skipif(enable_cql_v2() == False, reason="Account flag ENABLE_CQL_V2 is not not set.") -def test(capsys): - cli_command(capsys, ["scorecards", "create", "-f", "data/run-time/scorecard_drafts.yaml"]) - - response = cli_command(capsys, ["scorecards", "list", "-s"]) - assert any(scorecard['tag'] == 'public-api-test-draft-scorecard' for scorecard in response['scorecards']), "Draft scorecards are returned with showDrafts query parameter" - - response = cli_command(capsys, ["scorecards", "list"]) - assert not any(scorecard['tag'] == 'public-api-test-draft-scorecard' for scorecard in response['scorecards']), "Draft scorecards are not returned without showDrafts query parameter" diff --git a/tests/test_stdin.py b/tests/test_stdin.py new file mode 100644 index 0000000..82857a3 --- /dev/null +++ b/tests/test_stdin.py @@ -0,0 +1,11 @@ +""" +Tests for stdin input. 
+""" +import subprocess + +def test_stdin_input(capsys): + cat_process = subprocess.Popen(['cat', 'data/import/catalog/cli-test-create-entity.yaml'], stdout=subprocess.PIPE) + cortex_process = subprocess.Popen(['cortexapps_cli/cli.py', 'catalog', 'create','-f-'],stdin=cat_process.stdout, stdout=subprocess.PIPE) + out, err = cortex_process.communicate() + rc=cortex_process.wait() + assert rc == 0, "catalog test with stdin should succeed" diff --git a/tests/test_teams.py b/tests/test_teams.py deleted file mode 100644 index 385c68b..0000000 --- a/tests/test_teams.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["teams", "list"]) - assert any(team['teamTag'] == 'payments-team' for team in response['teams']) diff --git a/tests/test_workflows.py b/tests/test_workflows.py new file mode 100644 index 0000000..5c046fc --- /dev/null +++ b/tests/test_workflows.py @@ -0,0 +1,12 @@ +from tests.helpers.utils import * +import yaml + +def test(): + cli(["workflows", "create", "-f", "data/import/workflows/cli-test-workflow.yaml"]) + + response = cli(["workflows", "list"]) + assert any(workflow['tag'] == 'cli-test-workflow' for workflow in response['workflows']), "Should find workflow with tag cli-test-workflow" + + response = cli(["workflows", "get", "-t", "cli-test-workflow"]) + + response = cli(["workflows", "delete", "-t", "cli-test-workflow"])