From cb6cd2e93a0adee15569607af0cde099f381c22e Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Wed, 18 Sep 2024 22:46:52 -0400 Subject: [PATCH 01/56] teams commands --- cortexapps_cli/cli.py | 78 +++ cortexapps_cli/commands/teams.py | 192 ++++++ cortexapps_cli/cortex.py | 1078 +++++++++++++++--------------- cortexapps_cli/cortex_client.py | 55 ++ cortexapps_cli/models/team.py | 202 ++++++ poetry.lock | 122 +++- pyproject.toml | 2 + 7 files changed, 1189 insertions(+), 540 deletions(-) create mode 100644 cortexapps_cli/cli.py create mode 100644 cortexapps_cli/commands/teams.py create mode 100644 cortexapps_cli/cortex_client.py create mode 100644 cortexapps_cli/models/team.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py new file mode 100644 index 0000000..93d8632 --- /dev/null +++ b/cortexapps_cli/cli.py @@ -0,0 +1,78 @@ +import typer +from typing_extensions import Annotated + +import os +import sys +import importlib.metadata +import tomllib +import configparser + +from cortexapps_cli.cortex_client import CortexClient +import cortexapps_cli.commands.teams as teams + +app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]}) + +# add subcommands +app.add_typer(teams.app, name="teams") + +# global options +@app.callback() +def global_callback(ctx: typer.Context, + # verbose: bool = typer.Option(False, "--verbose", "-v", help="Enable verbose mode"), + api_key: str = typer.Option(None, "--api-key", "-k", help="API key", envvar="CORTEX_API_KEY"), + url: str = typer.Option("https://api.getcortexapp.com", "--url", "-u", help="Base URL for the API", envvar="CORTEX_BASE_URL"), + config_file: str = typer.Option(os.path.join(os.path.expanduser('~'), '.cortex', 'config'), "--config", "-c", help="Config file path", envvar="CORTEX_CONFIG"), + tenant: str = typer.Option("default", "--tenant", "-t", help="Tenant alias", envvar="CORTEX_TENANT_ALIAS"), + ): + + if not ctx.obj: + ctx.obj = {} + + if not os.path.isfile(config_file): + # no config file 
found + if not api_key: + raise typer.BadParameter("No API key provided and no config file found") + create_config = False + + # check if we are in a terminal, if so, ask the user if they want to create a config file + if sys.stdin.isatty() and sys.stdout.isatty(): + create_config = typer.confirm("No config file found. Do you want to create one?") + + if create_config: + os.makedirs(os.path.dirname(config_file), exist_ok=True) + with open(config_file, "w") as f: + f.write(f"[{tenant}]\n") + f.write(f"api_key = {api_key}\n") + f.write(f"base_url = {url}\n") + else: + # config file found + # if api_key is provided, use that in preference to the config file + if not api_key: + config = configparser.ConfigParser() + config.read(config_file) + if tenant not in config: + raise typer.BadParameter(f"Tenant {tenant} not found in config file") + api_key = config[tenant]["api_key"] + url = config[tenant]["base_url"] or url + + # strip any quotes or spaces from the api_key and url + api_key = api_key.strip('"\' ') + url = url.strip('"\' /') + + # ctx.obj["verbose"] = verbose + ctx.obj["api_key"] = api_key + ctx.obj["base_url"] = url + ctx.obj["client"] = CortexClient(api_key, url) + +@app.command() +def version(): + try: + with open("pyproject.toml", "rb") as f: + pyproject = tomllib.load(f) + version = pyproject["tool"]["poetry"]["version"] + except Exception as e: + version = importlib.metadata.version('cortexapps_cli') + print(version) + +if __name__ == "__main__": + app() diff --git a/cortexapps_cli/commands/teams.py b/cortexapps_cli/commands/teams.py new file mode 100644 index 0000000..09930fa --- /dev/null +++ b/cortexapps_cli/commands/teams.py @@ -0,0 +1,192 @@ +from typing import Optional +from typing_extensions import Annotated +import typer +import json +from rich import print, print_json +from enum import Enum + +from cortexapps_cli.models.team import Team + +app = typer.Typer() + +class TeamType(str, Enum): + CORTEX = "CORTEX" + IDP = "IDP" + +@app.command() +def 
create( + ctx: typer.Context, + team_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the team definition from")] = None, + tag: str = typer.Option(None, "--tag", "-t", help="Team tag"), + type: TeamType = typer.Option(TeamType.CORTEX, "--type", "-y", help="Team type"), + name: str = typer.Option(None, "--name", "-n", help="Team name"), + description: str = typer.Option(None, "--description", "-d", help="Team description"), + summary: str = typer.Option(None, "--summary", "-s", help="Team summary"), + idp_group: str = typer.Option(None, "--idp-group", "-g", help="IDP group - required when type is IDP"), + idp_provider: str = typer.Option(None, "--idp-provider", "-p", help="IDP provider - required when type is IDP"), +): + """ + Create a team + + Use -f to specify a file to read the team definition from (use -f - to read from stdin.) Or, provide the team attributes directly via the command options. + """ + client = ctx.obj["client"] + if team_input: + if tag or name or description or summary or idp_group or idp_provider: + raise typer.BadParameter("When providing a team definition file, do not specify any other team attributes") + data = json.loads("".join([line for line in team_input])) + else: + if not tag: + raise typer.BadParameter("tag is required if team definition is not provided") + if not name: + raise typer.BadParameter("name is required if team definition is not provided") + + data = { + "type": type, + "teamTag": tag, + "links": [], + "metadata": { + "name": name, + }, + "slackChannels": [], + "cortexTeam": { + "members": [] + }, + } + + if description: + data["metadata"]["description"] = description + + if summary: + data["metadata"]["summary"] = summary + + if type == TeamType.IDP: + if not idp_group: + raise typer.BadParameter("idp-group is required when type is IDP") + if not idp_provider: + raise typer.BadParameter("idp-provider is required when type is IDP") + data["idpGroup"] = { + "group": idp_group, + "provider": 
idp_provider, + } + + r = client.post("api/v1/teams", data=data) + print_json(json.dumps(r)) + +@app.command() +def list( + ctx: typer.Context, + include_teams_without_members: bool = typer.Option(False, "--include-teams-without-members", help="Include teams without members"), +): + """ + List teams + + Provide a team tag to list one team, or list all teams if no tag is provided. + """ + client = ctx.obj["client"] + params = { + "includeTeamsWithoutMembers": include_teams_without_members, + } + r = client.get("api/v1/teams", params=params) + print_json(json.dumps(r)) + +@app.command() +def get( + ctx: typer.Context, + team_tag: str = typer.Option(..., "--team-tag", "-t", help="Team tag"), +): + """ + Get a team + """ + client = ctx.obj["client"] + r = client.get(f"api/v1/teams/{team_tag}") + print_json(json.dumps(r)) + +@app.command() +def delete( + ctx: typer.Context, + team_tag: str = typer.Option(..., "--team-tag", "-t", help="Team tag"), +): + """ + Delete a team + """ + client = ctx.obj["client"] + client.delete(f"api/v1/teams/{team_tag}") + +@app.command() +def archive( + ctx: typer.Context, + team_tag: str = typer.Option(..., "--team-tag", "-t", help="Team tag"), +): + """ + Archive a team + """ + client = ctx.obj["client"] + r = client.put(f"api/v1/teams/{team_tag}/archive") + print_json(json.dumps(r)) + +@app.command() +def unarchive( + ctx: typer.Context, + team_tag: str = typer.Option(..., "--team-tag", "-t", help="Team tag"), +): + """ + Unarchive a team + """ + client = ctx.obj["client"] + r = client.put(f"api/v1/teams/{team_tag}/unarchive") + print_json(json.dumps(r)) + +@app.command() +def update( + ctx: typer.Context, + team_tag: str = typer.Option(..., "--team-tag", "-t", help="The tag of the team to update"), + team_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the team definition from")] = None, + name: str = typer.Option(None, "--name", "-n", help="Team name"), + description: str = typer.Option(None, 
"--description", "-d", help="Team description"), + summary: str = typer.Option(None, "--summary", "-s", help="Team summary"), +): + """ + Update team + """ + client = ctx.obj["client"] + if team_input: + if name or description or summary: + raise typer.BadParameter("When providing a team definition file, do not specify any other team attributes") + team = Team.from_json("".join([line for line in team_input])) + else: + team = Team.from_obj(client.get(f"api/v1/teams/{team_tag}")) + if name: + team.metadata.name = name + if description: + team.metadata.description = description + if summary: + team.metadata.summary = summary + r = client.put(f"api/v1/teams/{team_tag}", data=team.to_obj()) + print_json(json.dumps(r)) + +@app.command("update-metadata") +def update_metadata( + ctx: typer.Context, + team_tag: str = typer.Option(..., "--team-tag", "-t", help="The tag of the team to update"), + team_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the team definition from")] = None, +): + """ + Update team metadata + """ + + # all the update methods seem to do the same thing when reading from a file + update(ctx, team_tag=team_tag, team_input=team_input, name=None, description=None, summary=None) + +@app.command("update-members") +def update_members( + ctx: typer.Context, + team_tag: str = typer.Option(..., "--team-tag", "-t", help="The tag of the team to update"), + team_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the team definition from")] = None, +): + """ + Update team members + """ + + # all the update methods seem to do the same thing when reading from a file + update(ctx, team_tag=team_tag, team_input=team_input, name=None, description=None, summary=None) diff --git a/cortexapps_cli/cortex.py b/cortexapps_cli/cortex.py index 1e69026..2d7f79b 100755 --- a/cortexapps_cli/cortex.py +++ b/cortexapps_cli/cortex.py @@ -19,7 +19,7 @@ import tomllib import yaml -config={} +config = {} # borrowed from 
https://github.com/python-poetry/poetry/issues/273 def version(): @@ -36,30 +36,30 @@ def version(): # # Potentially, you can override the help class to customize this, https://medium.com/@george.shuklin/simple-implementation-of-help-command-a634711b70e def validate_input(argv, parser): - if len(argv)==0: - parser.print_help() - sys.exit(2) - + if len(argv) == 0: + parser.print_help() + sys.exit(2) + if argv[0] == "-h" or argv[0] == "--help" or argv[0] == "-v" or argv[0] == "--version": - return + return if argv[0] == "integrations": - if len(argv) == 1: - print("ERROR! Command provided with no parameters.\n") - print("Try " + argv[0] + " -h for help") - sys.exit(2) - if argv[1] == "-h" or argv[1] == "--help": - return - if len(argv) == 2: - print("ERROR! Command provided with no parameters.\n") - print("Try " + argv[0] + " " + argv[1] + " -h for help") - sys.exit(2) - return + if len(argv) == 1: + print("ERROR! Command provided with no parameters.\n") + print("Try " + argv[0] + " -h for help") + sys.exit(2) + if argv[1] == "-h" or argv[1] == "--help": + return + if len(argv) == 2: + print("ERROR! Command provided with no parameters.\n") + print("Try " + argv[0] + " " + argv[1] + " -h for help") + sys.exit(2) + return if len(argv) == 1: - print("ERROR! Command provided with no parameters.\n") - print("Try " + argv[0] + " -h for help") - sys.exit(2) + print("ERROR! 
Command provided with no parameters.\n") + print("Try " + argv[0] + " -h for help") + sys.exit(2) def read_file(args): # Check if file was passed as stdin @@ -78,7 +78,7 @@ def read_json_from_yaml(args): with open(args.file.name, 'rb') as f: data = yaml.safe_load(f) - return json.dumps({ "spec": "" + str(data) + "" }) + return json.dumps({"spec": "" + str(data) + ""}) def check_config_file(config_file, replace_string): if not os.path.isfile(config_file): @@ -86,7 +86,7 @@ def check_config_file(config_file, replace_string): response = input() if response == "Y" or response == "y": if not os.path.isdir(os.path.dirname(config_file)): - os.mkdir(os.path.dirname(config_file), 0o700) + os.mkdir(os.path.dirname(config_file), 0o700) cortex_config_contents = textwrap.dedent('''\ [default] api_key = {replace} @@ -102,7 +102,7 @@ def check_config_file(config_file, replace_string): sys.exit(0) else: sys.exit(0) - + # If CORTEX_API_KEY environment variable is defined, will not check for existence of a cortex # config file. 
def get_config(config, args, argv, parser, replace_string): @@ -143,442 +143,442 @@ def get_config(config, args, argv, parser, replace_string): config.update({"debug": args.debug}) config.update({"noObfuscate": args.noObfuscate}) - #args = parser.parse_args(argv) + # args = parser.parse_args(argv) - #return args + # return args def add_argument_accountId(subparser): subparser.add_argument( - '-a', - '--accountId', - help='AWS account Id', - required=True, - default=True, - metavar='' + '-a', + '--accountId', + help='AWS account Id', + required=True, + default=True, + metavar='' ) def add_argument_alias(subparser, help_text="The github configuration alias defined in Cortex"): subparser.add_argument( - '-a', - '--alias', - help=help_text, - required=True, - default=True, - metavar='' + '-a', + '--alias', + help=help_text, + required=True, + default=True, + metavar='' ) def add_argument_callee_tag(subparser, help_text='The entity tag (x-cortex-tag) that identifies the callee entity.'): subparser.add_argument( - '-e', - '--calleeTag', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' + '-e', + '--calleeTag', + help=help_text, + required=True, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_caller_tag(subparser, help_text='The entity tag (x-cortex-tag) that identifies the caller entity.'): subparser.add_argument( - '-r', - '--callerTag', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' + '-r', + '--callerTag', + help=help_text, + required=True, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_departmentTag(subparser): subparser.add_argument( - '-d', - '--departmentTag', - help='The department entity tag', - required=True, - default=True, - metavar='' + '-d', + '--departmentTag', + help='The department entity tag', + required=True, + default=True, + metavar='' ) def add_argument_discovery_audit_type(subparser): subparser.add_argument( - '-t', - '--type', - help='Filter based on type of the 
vent', - required=False, - choices=['NEW_REPOSITORY', 'REPOSITORY_DELETED', 'REPOSITORY_ARCHIVED', 'NEW_K8S_RESOURCE', 'NEW_APM_RESOURCE', 'APM_RESOURCE_NOT_DETECTED', 'NEW_ECS_RESOURCE', 'ECS_RESOURCE_NOT_DETECTED', 'NEW_AWS_RESOURCE', 'AWS_RESOURCE_NOT_DETECTED', 'NEW_GOOGLE_CLOUD_RESOURCE', 'GOOGLE_CLOUD_RESOURCE_NOT_DETECTED'], - default=argparse.SUPPRESS, - metavar='' + '-t', + '--type', + help='Filter based on type of the vent', + required=False, + choices=['NEW_REPOSITORY', 'REPOSITORY_DELETED', 'REPOSITORY_ARCHIVED', 'NEW_K8S_RESOURCE', 'NEW_APM_RESOURCE', 'APM_RESOURCE_NOT_DETECTED', 'NEW_ECS_RESOURCE', 'ECS_RESOURCE_NOT_DETECTED', 'NEW_AWS_RESOURCE', 'AWS_RESOURCE_NOT_DETECTED', 'NEW_GOOGLE_CLOUD_RESOURCE', 'GOOGLE_CLOUD_RESOURCE_NOT_DETECTED'], + default=argparse.SUPPRESS, + metavar='' ) def add_argument_discovery_audit_source(subparser): subparser.add_argument( - '-s', - '--source', - help='Filter based on integration source', - required=False, - choices=['AWS', 'AZURE_DEVOPS', 'BITBUCKET', 'DATADOG', 'DYNATRACE', 'ECS', 'GCP', 'GITHUB', 'GITLAB', 'INSTANA', 'K8S', 'LIGHTSTEP', 'LAMBDA', 'NEWRELIC', 'SERVICENOW'], - default=argparse.SUPPRESS, - metavar='' + '-s', + '--source', + help='Filter based on integration source', + required=False, + choices=['AWS', 'AZURE_DEVOPS', 'BITBUCKET', 'DATADOG', 'DYNATRACE', 'ECS', 'GCP', 'GITHUB', 'GITLAB', 'INSTANA', 'K8S', 'LIGHTSTEP', 'LAMBDA', 'NEWRELIC', 'SERVICENOW'], + default=argparse.SUPPRESS, + metavar='' ) def add_argument_end_time(subparser, help_text='End time for audit log retrieve'): subparser.add_argument( - '-e', - '--endTime', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' + '-e', + '--endTime', + help=help_text, + required=False, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_entity_tag(subparser, help_text='The entity tag (x-cortex-tag) that identifies the entity.', required=True): subparser.add_argument( - '-e', - '--entityTag', - help=help_text, - 
required=required, - default=argparse.SUPPRESS, - metavar='' + '-e', + '--entityTag', + help=help_text, + required=required, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_environment(subparser, help_text='The environment name of the deployment to delete.'): subparser.add_argument( - '-e', - '--environment', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' + '-e', + '--environment', + help=help_text, + required=False, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_export_directory(subparser): subparser.add_argument( - '-d', - '--directory', - help="Directory where export will be created; defaults to ~/.cortex/export/", - required=False, - default=os.path.expanduser('~') + '/.cortex/export/' + datetime.now().strftime("%Y-%m-%d-%H-%M-%S"), - metavar='' + '-d', + '--directory', + help="Directory where export will be created; defaults to ~/.cortex/export/", + required=False, + default=os.path.expanduser('~') + '/.cortex/export/' + datetime.now().strftime("%Y-%m-%d-%H-%M-%S"), + metavar='' ) subparser.add_argument( - '-default-directory', - '--default-directory', - help=argparse.SUPPRESS, - required=False, - default=os.path.expanduser('~') + '/.cortex/export/' + datetime.now().strftime("%Y-%m-%d-%H-%M-%S") + '-default-directory', + '--default-directory', + help=argparse.SUPPRESS, + required=False, + default=os.path.expanduser('~') + '/.cortex/export/' + datetime.now().strftime("%Y-%m-%d-%H-%M-%S") ) def add_argument_file(subparser, help_text): subparser.add_argument( - '-f', - '--file', - required=True, - help=help_text + "; can be passed as stdin with -, example: -f-", - default=argparse.SUPPRESS, - type=argparse.FileType('r'), - metavar='' + '-f', + '--file', + required=True, + help=help_text + "; can be passed as stdin with -, example: -f-", + default=argparse.SUPPRESS, + type=argparse.FileType('r'), + metavar='' ) def add_argument_force(subparser, help_text='When true, overrides values that were defined in the 
catalog descriptor. Will be overwritten the next time the catalog descriptor is processed.'): subparser.add_argument( - '-o', - '--force', - help=help_text, - action='store_true', - default='false' + '-o', + '--force', + help=help_text, + action='store_true', + default='false' ) def add_argument_groups(subparser): subparser.add_argument( - '-g', - '--groups', - help='Filter based on groups, which correspond to the x-cortex-groups field in the Catalog Descriptor. Accepts a comma-delimited list of groups', - default=argparse.SUPPRESS, - metavar='' + '-g', + '--groups', + help='Filter based on groups, which correspond to the x-cortex-groups field in the Catalog Descriptor. Accepts a comma-delimited list of groups', + default=argparse.SUPPRESS, + metavar='' ) def add_argument_hierarchyDepth(subparser): subparser.add_argument( - '-d', - '--hierarchy-depth', - help='Depth of the parent / children hierarchy nodes. Can be \'full\' or a valid integer', - default='full', - metavar='' + '-d', + '--hierarchy-depth', + help='Depth of the parent / children hierarchy nodes. 
Can be \'full\' or a valid integer', + default='full', + metavar='' ) def add_argument_id(subparser, help_text='The id of the CQL query'): subparser.add_argument( - '-i', - '--id', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' + '-i', + '--id', + help=help_text, + required=True, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_import_directory(subparser): subparser.add_argument( - '-d', - '--directory', - help="Directory containing export contents", - required=True, - default=argparse.SUPPRESS, - metavar='' + '-d', + '--directory', + help="Directory containing export contents", + required=True, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_includeDrafts(subparser, help_text='Include plugin drafts.'): subparser.add_argument( - '-i', - '--includeDrafts', - help=help_text, - required=False, - default=True, - action='store_true' + '-i', + '--includeDrafts', + help=help_text, + required=False, + default=True, + action='store_true' ) def add_argument_includeHierarchyFields(subparser): subparser.add_argument( - '-i', - '--includeHierarchyFields', - help='List of sub fields to include for hierarchies. Only supports \'groups\'', - required=False, - default=argparse.SUPPRESS, - metavar='' + '-i', + '--includeHierarchyFields', + help='List of sub fields to include for hierarchies. 
Only supports \'groups\'', + required=False, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_includeIncoming(subparser, help_text='Including incoming dependencies.'): subparser.add_argument( - '-i', - '--includeIncoming', - help=help_text, - required=False, - default=True, - action='store_true' + '-i', + '--includeIncoming', + help=help_text, + required=False, + default=True, + action='store_true' ) def add_argument_includeIgnored(subparser, help_text='Flag to include ignored events in result.'): subparser.add_argument( - '-i', - '--includeIgnored', - help=help_text, - required=False, - default=False, - action='store_true' + '-i', + '--includeIgnored', + help=help_text, + required=False, + default=False, + action='store_true' ) def add_argument_includeOutgoing(subparser, help_text='Including outgoing dependencies.'): subparser.add_argument( - '-o', - '--includeOutgoing', - help=help_text, - required=False, - default=False, - action='store_true' + '-o', + '--includeOutgoing', + help=help_text, + required=False, + default=False, + action='store_true' ) def add_argument_includeTeamsWithoutMembers(subparser): subparser.add_argument( - '-i', - '--includeTeamsWithoutMembers', - help='Include teams without members', - required=False, - default=False, - action='store_true' + '-i', + '--includeTeamsWithoutMembers', + help='Include teams without members', + required=False, + default=False, + action='store_true' ) def add_argument_key(subparser, help_text='Key to retrieve.'): subparser.add_argument( - '-k', - '--key', - help=help_text, - required=True, - metavar='' + '-k', + '--key', + help=help_text, + required=True, + metavar='' ) def add_argument_method(subparser, help_text='The http method type of the dependency.'): subparser.add_argument( - '-m', - '--method', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' + '-m', + '--method', + help=help_text, + required=False, + default=argparse.SUPPRESS, + metavar='' ) def 
add_argument_name(subparser, help_text='The name of the thing'): subparser.add_argument( - '-n', - '--name', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' + '-n', + '--name', + help=help_text, + required=True, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_page(subparser, help_text='Page number to return, 0 indexed'): subparser.add_argument( - '-p', - '--page', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' + '-p', + '--page', + help=help_text, + required=False, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_page_size(subparser, help_text='Page size for results'): subparser.add_argument( - '-z', - '--pageSize', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' + '-z', + '--pageSize', + help=help_text, + required=False, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_path(subparser, help_text='The path of the dependency.'): subparser.add_argument( - '-p', - '--path', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' + '-p', + '--path', + help=help_text, + required=False, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_role(subparser): subparser.add_argument( - '-r', - '--role', - help='AWS role', - required=True, - default=True, - metavar='' + '-r', + '--role', + help='AWS role', + required=True, + default=True, + metavar='' ) def add_argument_scorecard_tag(subparser): subparser.add_argument( - '-s', - '--scorecardTag', - help='Unique tag for the Scorecard', - required=True, - default=argparse.SUPPRESS, - metavar='' + '-s', + '--scorecardTag', + help='Unique tag for the Scorecard', + required=True, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_sha(subparser, help_text='The sha string of the deployment to delete.'): subparser.add_argument( - '-s', - '--sha', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' + '-s', + '--sha', + help=help_text, + 
required=False, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_show_drafts(subparser, help_text='Include draft scorecards'): subparser.add_argument( - '-s', - '--showDrafts', - help=help_text, - required=False, - action='store_true' + '-s', + '--showDrafts', + help=help_text, + required=False, + action='store_true' ) def add_argument_start_time(subparser, help_text='Start time for audit log retrieve'): subparser.add_argument( - '-s', - '--startTime', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' + '-s', + '--startTime', + help=help_text, + required=False, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_tag(subparser, help_text='The entity tag (x-cortex-tag) that identifies the entity.'): subparser.add_argument( - '-t', - '--tag', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' + '-t', + '--tag', + help=help_text, + required=True, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_teamTag(subparser, help_text='The tag identifing the team.'): subparser.add_argument( - '-t', - '--teamTag', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' + '-t', + '--teamTag', + help=help_text, + required=True, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_timestamp(subparser, help_text='Date-time of events to include.'): subparser.add_argument( - '-i', - '--timestamp', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' + '-i', + '--timestamp', + help=help_text, + required=False, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_timeout(subparser, help_text='Time in seconds to wait before timeout.'): subparser.add_argument( - '-x', - '--timeout', - help=help_text, - required=False, - default=120, - metavar='' + '-x', + '--timeout', + help=help_text, + required=False, + default=120, + metavar='' ) def add_argument_type(subparser, option="-t", help_text='The resource type.', required=True): 
subparser.add_argument( - option, - '--type', - help=help_text, - required=required, - default=argparse.SUPPRESS, - metavar='' + option, + '--type', + help=help_text, + required=required, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_types(subparser, option="-t", help_text='Comma-separated list of entity types.', required=True, default=argparse.SUPPRESS): subparser.add_argument( - option, - '--types', - help=help_text, - required=required, - default=default, - metavar='' + option, + '--types', + help=help_text, + required=required, + default=default, + metavar='' ) def add_argument_uuid(subparser, option="-u", help_text='UUID of custom event.', required=True): subparser.add_argument( - option, - '--uuid', - help=help_text, - required=required, - default=argparse.SUPPRESS, - metavar='' + option, + '--uuid', + help=help_text, + required=required, + default=argparse.SUPPRESS, + metavar='' ) def add_argument_wait(subparser, help_text='Wait for query to complete.'): subparser.add_argument( - '-w', - '--wait', - help=help_text, - required=False, - default=False, - action='store_true' + '-w', + '--wait', + help=help_text, + required=False, + default=False, + action='store_true' ) def add_argument_yaml(subparser, help_text='When true, returns the YAML representation of the descriptor.'): subparser.add_argument( - '-y', - '--yaml', - help=help_text, - action='store_true', - default=False, - required=False + '-y', + '--yaml', + help=help_text, + action='store_true', + default=False, + required=False ) def debug_json(r, method): @@ -609,7 +609,7 @@ def exit(r, method, expected_rc=200, err=None): print(f'{method} {r.url} => {r.status_code} {r.reason}') if err != None: print(err) - + if not config.get('is_importing', False) or r.status_code != 409 or r.status_code != 400: sys.exit(r.status_code) else: @@ -630,12 +630,12 @@ def default_headers(content_type='application/json', other={}): content_type = content_type + ";" + charset_utf8 h = { - 'Content-Type': 
content_type + 'Content-Type': content_type } - + for k, v in other.items(): h[k] = v - + return h # There might be a more efficient use of the requests library to combine @@ -652,8 +652,8 @@ def get(url, headers={}): err = e.response.text if err != "": if details in e.response.json(): - details = e.response.json()['details'] - exit(r, 'GET', err = details) + details = e.response.json()['details'] + exit(r, 'GET', err=details) def put(url, headers={}, payload=""): @@ -683,7 +683,7 @@ def post(url, headers={}, payload="", expected_rc=200): err = None try: - r = requests.post(config['url'] + url, headers=headers,data=payload) + r = requests.post(config['url'] + url, headers=headers, data=payload) r.raise_for_status() except requests.exceptions.RequestException as e: err = e.response.text @@ -698,10 +698,10 @@ def parse_opts(args, ignore_tags=[]): if k in ['tenant', 'debug', 'noObfuscate', 'func', 'config'] + ignore_tags: continue if len(opts) == 0: - char="?" + char = "?" else: - char="&" - opts=opts + char + k + "=" + str(v) + char = "&" + opts = opts + char + k + "=" + str(v) # convert python args to valid JSON return opts.replace("True", "true").replace("False", "false") @@ -747,11 +747,11 @@ def export(args): if args.directory == args.default_directory: args.directory = args.directory + "-" + args.tenant - catalog_directory=args.directory + "/catalog" - json_directory=args.directory + "/json" - scorecard_directory=args.directory + "/scorecards" - teams_directory=args.directory + "/teams" - resource_definitions_directory=args.directory + "/resource-definitions" + catalog_directory = args.directory + "/catalog" + json_directory = args.directory + "/json" + scorecard_directory = args.directory + "/scorecards" + teams_directory = args.directory + "/teams" + resource_definitions_directory = args.directory + "/resource-definitions" directory_list = [catalog_directory, json_directory, resource_definitions_directory, scorecard_directory, teams_directory] @@ -760,7 +760,7 
@@ def export(args): os.makedirs(directory, exist_ok=True) print("Getting resource definitions") - resource_definitions_json=json_directory + "/resource-definitions.json" + resource_definitions_json = json_directory + "/resource-definitions.json" resource_definitions_output = io.StringIO() with redirect_stdout(resource_definitions_output): resource_definitions_list(args) @@ -772,10 +772,10 @@ def export(args): resource_types_list = [] for t in data['definitions']: resource_types_list.append(t['type']) - + for resource_type in sorted(resource_types_list): print("--> " + resource_type) - resource_file=resource_definitions_directory + "/" + resource_type + ".json" + resource_file = resource_definitions_directory + "/" + resource_type + ".json" args.type = resource_type resource_definition_output = io.StringIO() with redirect_stdout(resource_definition_output): @@ -790,7 +790,7 @@ def export(args): args.yaml = True while not this_page == total_pages: - descriptors=json_directory + "/descriptors-" + str(this_page) + ".json" + descriptors = json_directory + "/descriptors-" + str(this_page) + ".json" catalog_output = io.StringIO() with redirect_stdout(catalog_output): save_types = args.types @@ -820,7 +820,7 @@ def export(args): if any(export_type == "ALL" or export_type == "ip-allowlist" for export_type in args.types.split()): print("Getting IP Allowlist definitions") - ip_allowlist_json=json_directory + "/ip-allowlist.json" + ip_allowlist_json = json_directory + "/ip-allowlist.json" ip_allowlist_output = io.StringIO() with redirect_stdout(ip_allowlist_output): ip_allowlist_get(args) @@ -829,7 +829,7 @@ def export(args): if any(export_type == "ALL" or export_type == "scorecard" for export_type in args.types.split()): print("Getting scorecards") - scorecards_json=json_directory + "/scorecards.json" + scorecards_json = json_directory + "/scorecards.json" scorecards_output = io.StringIO() with redirect_stdout(scorecards_output): scorecards_list(args) @@ -845,8 +845,8 @@ def 
export(args): for tag in sorted(scorecard_list): print("--> " + tag) - scorecard_file=scorecard_directory + "/" + tag + ".yaml" - args.tag=tag + scorecard_file = scorecard_directory + "/" + tag + ".yaml" + args.tag = tag scorecards_descriptor_output = io.StringIO() with redirect_stdout(scorecards_descriptor_output): scorecards_descriptor(args) @@ -907,7 +907,7 @@ def import_from_export(args): with redirect_stdout(resource_definitions_output): resource_definitions_list(args) resource_definitions_json = json.loads(resource_definitions_output.getvalue()) - + print("Importing resource definitions") for file in sorted(os.listdir(resource_definitions_directory)): print("--> " + file) @@ -985,10 +985,10 @@ def catalog_archive(args): def subparser_catalog_create_or_update(subparser): sp = subparser.add_parser( - 'create', - help='Create a catalog entity using a descriptor YAML. If the YAML refers to an entity that already exists (as referenced by the x-cortex-tag), this API will update the existing entity.', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + 'create', + help='Create a catalog entity using a descriptor YAML. 
If the YAML refers to an entity that already exists (as referenced by the x-cortex-tag), this API will update the existing entity.', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Additional documentation ------------------------ https://docs.cortex.io/docs/reference/basics/entities @@ -996,18 +996,18 @@ def subparser_catalog_create_or_update(subparser): ''')) add_argument_file(sp, 'File containing openapi descriptor for entity') sp.add_argument( - '-d', - '--dry-run', - help='When true, this endpoint only validates the descriptor contents and returns any errors or warnings.', - action='store_true', - default='false' + '-d', + '--dry-run', + help='When true, this endpoint only validates the descriptor contents and returns any errors or warnings.', + action='store_true', + default='false' ) sp.add_argument( - '-g', - '--github-pr', - help='Add a comment with validation errors on the pull request with the given ID', - default=argparse.SUPPRESS, - metavar='' + '-g', + '--github-pr', + help='Add a comment with validation errors on the pull request with the given ID', + default=argparse.SUPPRESS, + metavar='' ) sp.set_defaults(func=catalog_create_or_update) @@ -1043,73 +1043,73 @@ def catalog_gitops_logs(args): def subparser_catalog_list(subparser): sp = subparser.add_parser( - 'list', - help='List all entities across the Service, Resource and Domain Catalogs.\n This API returns summary data for each entity, so refer to the retrieve entity method to lookup more details for a single entity.' + 'list', + help='List all entities across the Service, Resource and Domain Catalogs.\n This API returns summary data for each entity, so refer to the retrieve entity method to lookup more details for a single entity.' 
) sp.add_argument( - '-a', - '--includeArchived', - help='Whether to include archived entities in the response, default to false', - default=False, - action='store_true', - required=False + '-a', + '--includeArchived', + help='Whether to include archived entities in the response, default to false', + default=False, + action='store_true', + required=False ) add_argument_hierarchyDepth(sp) add_argument_groups(sp) add_argument_includeHierarchyFields(sp) sp.add_argument( - '-in', - '--includeNestedFields', - help='List of sub fields to include for different types, for example team:members', - default=argparse.SUPPRESS, - metavar='', - required=False + '-in', + '--includeNestedFields', + help='List of sub fields to include for different types, for example team:members', + default=argparse.SUPPRESS, + metavar='', + required=False ) sp.add_argument( - '-io', - '--includeOwners', - help='Whether to include ownership information for each entity in the response', - default=False, - action='store_true', - required=False + '-io', + '--includeOwners', + help='Whether to include ownership information for each entity in the response', + default=False, + action='store_true', + required=False ) sp.add_argument( - '-l', - '--includeLinks', - help='Whether to include links for each entity in the response', - default=False, - action='store_true', - required=False + '-l', + '--includeLinks', + help='Whether to include links for each entity in the response', + default=False, + action='store_true', + required=False ) sp.add_argument( - '-m', - '--includeMetadata', - help='Whether to include custom data for each entity in the response', - default=False, - action='store_true', - required=False + '-m', + '--includeMetadata', + help='Whether to include custom data for each entity in the response', + default=False, + action='store_true', + required=False ) sp.add_argument( - '-o', - '--owners', - help='Filter based on owner group names, which correspond to the x-cortex-owners field in the 
Catalog Descriptor. Accepts a comma-delimited list of owner group names', - default=argparse.SUPPRESS, - metavar='' + '-o', + '--owners', + help='Filter based on owner group names, which correspond to the x-cortex-owners field in the Catalog Descriptor. Accepts a comma-delimited list of owner group names', + default=argparse.SUPPRESS, + metavar='' ) add_argument_page(sp) sp.add_argument( - '-r', - '--gitRepositories', - help='Supports only GitHub repositories in the org/repo format', - default=argparse.SUPPRESS, - metavar='' + '-r', + '--gitRepositories', + help='Supports only GitHub repositories in the org/repo format', + default=argparse.SUPPRESS, + metavar='' ) sp.add_argument( - '-t', - '--types', - help='Filter the response to specific types of entities. By default, this includes services, resources, and domains. Corresponds to the x-cortex-type field in the Entity Descriptor.', - default=argparse.SUPPRESS, - metavar='' + '-t', + '--types', + help='Filter the response to specific types of entities. By default, this includes services, resources, and domains. 
Corresponds to the x-cortex-type field in the Entity Descriptor.', + default=argparse.SUPPRESS, + metavar='' ) add_argument_page_size(sp) sp.set_defaults(func=catalog_list) @@ -1176,9 +1176,9 @@ def subparser_custom_data_opts(subparsers): subparser_custom_data_list(sp) def subparser_custom_data_add(subparser): - sp = subparser.add_parser('add', help='Add custom data for entity', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add', help='Add custom data for entity', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -1272,9 +1272,9 @@ def subparser_custom_events_opts(subparsers): subparser_custom_events_update_by_uuid(sp) def subparser_custom_events_create(subparser): - sp = subparser.add_parser('create', help='Create custom event', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('create', help='Create custom event', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -1303,7 +1303,7 @@ def custom_events_create(args): post("/api/v1/catalog/" + args.tag + "/custom-events", default_headers(), read_file(args)) def subparser_custom_events_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all custom events for an entity') + sp = subparser.add_parser('delete-all', help='Delete all custom events for an entity') add_argument_tag(sp) add_argument_type(sp, option='-y', help_text='The custom event type, defaults to all types', required=False) add_argument_timestamp(sp) @@ -1313,7 +1313,7 @@ def custom_events_delete_all(args): delete("/api/v1/catalog/" + args.tag + "/custom-events" + parse_opts(args), expected_rc=204) def subparser_custom_events_list(subparser): - sp = 
subparser.add_parser('list', help='List custom events for entity') + sp = subparser.add_parser('list', help='List custom events for entity') add_argument_tag(sp) add_argument_type(sp, option='-y', help_text='The custom event type, defaults to all types', required=False) add_argument_timestamp(sp) @@ -1323,7 +1323,7 @@ def custom_events_list(args): get("/api/v1/catalog/" + args.tag + "/custom-events" + parse_opts(args)) def subparser_custom_events_delete_by_uuid(subparser): - sp = subparser.add_parser('delete-by-uuid', help='Delete custom events by UUID') + sp = subparser.add_parser('delete-by-uuid', help='Delete custom events by UUID') add_argument_tag(sp) add_argument_uuid(sp) sp.set_defaults(func=custom_events_delete_by_uuid) @@ -1332,7 +1332,7 @@ def custom_events_delete_by_uuid(args): delete("/api/v1/catalog/" + args.tag + "/custom-events/" + args.uuid, expected_rc=204) def subparser_custom_events_get_by_uuid(subparser): - sp = subparser.add_parser('get-by-uuid', help='Get custom event by UUID') + sp = subparser.add_parser('get-by-uuid', help='Get custom event by UUID') add_argument_tag(sp) add_argument_uuid(sp) sp.set_defaults(func=custom_events_get_by_uuid) @@ -1341,7 +1341,7 @@ def custom_events_get_by_uuid(args): get("/api/v1/catalog/" + args.tag + "/custom-events/" + args.uuid) def subparser_custom_events_update_by_uuid(subparser): - sp = subparser.add_parser('update-by-uuid', help='Update custom event by UUID') + sp = subparser.add_parser('update-by-uuid', help='Update custom event by UUID') add_argument_tag(sp) add_argument_uuid(sp) add_argument_file(sp, 'File containing custom event to create') @@ -1366,7 +1366,7 @@ def subparser_groups_add(subparser): sp.set_defaults(func=groups_add) def groups_add(args): - put("/api/v1/catalog/"+ args.tag + "/groups", default_headers(), payload=read_file(args)) + put("/api/v1/catalog/" + args.tag + "/groups", default_headers(), payload=read_file(args)) def subparser_groups_delete(subparser): sp = 
subparser.add_parser('delete', help='Delete group from entity') @@ -1376,7 +1376,7 @@ def subparser_groups_delete(subparser): def groups_delete(args): headers = default_headers() - delete("/api/v1/catalog/"+ args.tag + "/groups", headers, read_file(args)) + delete("/api/v1/catalog/" + args.tag + "/groups", headers, read_file(args)) def subparser_groups_get(subparser): sp = subparser.add_parser('get', help='Get groups for entity') @@ -1384,7 +1384,7 @@ def subparser_groups_get(subparser): sp.set_defaults(func=groups_get) def groups_get(args): - get("/api/v1/catalog/"+ args.tag + "/groups") + get("/api/v1/catalog/" + args.tag + "/groups") # Groups end # Dependencies start @@ -1403,9 +1403,9 @@ def subparser_dependencies_opts(subparsers): def subparser_dependencies_add(subparser): sp = subparser.add_parser('add', - help='Create dependency from an entity', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + help='Create dependency from an entity', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -1428,9 +1428,9 @@ def dependencies_add(args): def subparser_dependencies_add_in_bulk(subparser): sp = subparser.add_parser('add-in-bulk', - help='Create or update dependencies in bulk', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + help='Create or update dependencies in bulk', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -1478,9 +1478,9 @@ def dependencies_delete_all(args): def subparser_dependencies_delete_in_bulk(subparser): sp = subparser.add_parser('delete-in-bulk', - help='Delete dependencies in bulk', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + help='Delete dependencies in bulk', + 
formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -1630,10 +1630,10 @@ def subparser_discovery_audit_opts(subparsers): subparser_discovery_audit_get(sp) def subparser_discovery_audit_get(subparser): - sp = subparser.add_parser('get', - help="This report shows you recent changes in your environment that aren't reflected in Cortex, including newly created repositories, services, and resources that we discover from your integrations or which were deleted in the environment but corresponding Cortex entities are still present.Add a deployment to an entity", - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('get', + help="This report shows you recent changes in your environment that aren't reflected in Cortex, including newly created repositories, services, and resources that we discover from your integrations or which were deleted in the environment but corresponding Cortex entities are still present.Add a deployment to an entity", + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Possible values for source, type: --------------------------------- source: @@ -1745,7 +1745,7 @@ def subparser_groups_add(subparser): def groups_add(args): headers = default_headers() - put("/api/v1/catalog/"+ args.tag + "/groups", headers, payload=read_file(args)) + put("/api/v1/catalog/" + args.tag + "/groups", headers, payload=read_file(args)) def subparser_groups_delete(subparser): sp = subparser.add_parser('delete', help='Delete group from entity') @@ -1755,7 +1755,7 @@ def subparser_groups_delete(subparser): def groups_delete(args): headers = default_headers() - delete("/api/v1/catalog/"+ args.tag + "/groups", headers, read_file(args)) + delete("/api/v1/catalog/" + args.tag + "/groups", headers, read_file(args)) def subparser_groups_get(subparser): sp = subparser.add_parser('get', 
help='Get groups for entity') @@ -1763,7 +1763,7 @@ def subparser_groups_get(subparser): sp.set_defaults(func=groups_get) def groups_get(args): - get("/api/v1/catalog/"+ args.tag + "/groups") + get("/api/v1/catalog/" + args.tag + "/groups") # Groups end # Integrations start @@ -1859,7 +1859,7 @@ def subparser_integrations_aws_add(subparser): def integrations_aws_add(args): headers = default_headers() - payload="{ \"accountId\": \"" + args.accountId + "\", \"role\": \"" + args.role + "\"}" + payload = "{ \"accountId\": \"" + args.accountId + "\", \"role\": \"" + args.role + "\"}" post("/api/v1/aws/configurations", headers, payload=payload) def subparser_integrations_aws_update(subparser): @@ -1904,9 +1904,9 @@ def subparser_integrations_azure_resources_opts(subparser): def subparser_integrations_azure_resources_add(subparser): sp = subparser.add_parser('add', - help='Add a single configuration', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + help='Add a single configuration', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -1926,10 +1926,10 @@ def integrations_azure_resources_add(args): post("/api/v1/azure-resources/configuration", headers, payload=read_file(args)) def subparser_integrations_azure_resources_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-multiple', + help='Add multiple configurations', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2040,10 +2040,10 @@ def integrations_coralogix_add(args): post("/api/v1/coralogix/configuration/", headers, payload=read_file(args)) def 
subparser_integrations_coralogix_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-multiple', + help='Add multiple configurations', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2143,10 +2143,10 @@ def subparser_integrations_github_opts(subparser): subparser_integrations_github_validate_all(sp) def subparser_integrations_github_add(subparser): - sp = subparser.add_parser('add', - help='Add a single configuration', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add', + help='Add a single configuration', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2168,10 +2168,10 @@ def integrations_github_add(args): post("/api/v1/github/configurations/app", headers, payload=read_file(args)) def subparser_integrations_github_add_personal(subparser): - sp = subparser.add_parser('add-personal', - help='Add a single personal configuration', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-personal', + help='Add a single personal configuration', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2310,10 +2310,10 @@ def integrations_gitlab_add(args): post("/api/v1/gitlab/configuration/", headers, payload=read_file(args)) def subparser_integrations_gitlab_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - 
epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-multiple', + help='Add multiple configurations', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2424,10 +2424,10 @@ def integrations_datadog_add(args): post("/api/v1/datadog/configuration/", headers, payload=read_file(args)) def subparser_integrations_datadog_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-multiple', + help='Add multiple configurations', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2528,10 +2528,10 @@ def subparser_integrations_incidentio_opts(subparser): subparser_integrations_incidentio_validate_all(sp) def subparser_integrations_incidentio_add(subparser): - sp = subparser.add_parser('add', - help='Add a single configuration', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add', + help='Add a single configuration', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2548,10 +2548,10 @@ def integrations_incidentio_add(args): post("/api/v1/incidentio/configuration", headers, payload=read_file(args)) def subparser_integrations_incidentio_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-multiple', + help='Add multiple configurations', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of 
JSON-formatted configuration file: -------------------------------------------- { @@ -2664,10 +2664,10 @@ def integrations_launchdarkly_add(args): post("/api/v1/launchdarkly/configuration/", headers, payload=read_file(args)) def subparser_integrations_launchdarkly_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-multiple', + help='Add multiple configurations', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2764,10 +2764,10 @@ def subparser_integrations_newrelic_opts(subparser): subparser_integrations_newrelic_validate_all(sp) def subparser_integrations_newrelic_add(subparser): - sp = subparser.add_parser('add', - help='Add a single configuration', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add', + help='Add a single configuration', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2786,10 +2786,10 @@ def integrations_newrelic_add(args): post("/api/v1/newrelic/configuration", headers, payload=read_file(args)) def subparser_integrations_newrelic_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-multiple', + help='Add multiple configurations', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -2897,10 +2897,10 @@ def integrations_prometheus_add(args): post("/api/v1/prometheus/configuration/", headers, 
payload=read_file(args)) def subparser_integrations_prometheus_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-multiple', + help='Add multiple configurations', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -3047,10 +3047,10 @@ def integrations_sonarqube_add(args): post("/api/v1/sonarqube/configuration/", headers, payload=read_file(args)) def subparser_integrations_sonarqube_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('add-multiple', + help='Add multiple configurations', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -3148,9 +3148,9 @@ def ip_allowlist_get(args): def subparser_ip_allowlist_replace(subparser): sp = subparser.add_parser('replace', - help='Replace allowlist of IP addresses and ranges', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + help='Replace allowlist of IP addresses and ranges', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -3171,9 +3171,9 @@ def ip_allowlist_replace(args): def subparser_ip_allowlist_validate(subparser): sp = subparser.add_parser('validate', - help='Validate allowlist of IP addresses and ranges', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + help='Validate allowlist of IP addresses and ranges', + formatter_class=argparse.RawTextHelpFormatter, + 
epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -3240,7 +3240,7 @@ def subparser_packages_list_packages(subparser): sp.set_defaults(func=packages_list_packages) def packages_list_packages(args): - get("/api/v1/catalog/"+ args.tag + "/packages") + get("/api/v1/catalog/" + args.tag + "/packages") def subparser_packages_go_opts(subparser): sp = subparser.add_subparsers(help='Go package sub-commands.') @@ -3256,7 +3256,7 @@ def subparser_packages_upload_go(subparser): def packages_upload_go(args): headers = default_headers('application/text') - post("/api/v1/catalog/"+ args.tag + "/packages/go/gosum", headers, read_file(args)) + post("/api/v1/catalog/" + args.tag + "/packages/go/gosum", headers, read_file(args)) def subparser_packages_delete_go(subparser): sp = subparser.add_parser('delete', help='Delete go package from entity') @@ -3265,7 +3265,7 @@ def subparser_packages_delete_go(subparser): sp.set_defaults(func=packages_delete_go) def packages_delete_go(args): - delete("/api/v1/catalog/"+ args.tag + "/packages/go" + parse_opts(args)) + delete("/api/v1/catalog/" + args.tag + "/packages/go" + parse_opts(args)) def subparser_packages_java_opts(subparser): sp = subparser.add_subparsers(help='Java package sub-commands.') @@ -3282,7 +3282,7 @@ def subparser_packages_upload_java_single(subparser): def packages_upload_java_single(args): headers = default_headers() - post("/api/v1/catalog/"+ args.tag + "/packages/java", headers, read_file(args)) + post("/api/v1/catalog/" + args.tag + "/packages/java", headers, read_file(args)) def subparser_packages_upload_java_multiple(subparser): sp = subparser.add_parser('upload-multiple', help='Upload multiple java packages') @@ -3292,7 +3292,7 @@ def subparser_packages_upload_java_multiple(subparser): def packages_upload_java_multiple(args): headers = default_headers() - post("/api/v1/catalog/"+ args.tag + "/packages/java/bulk", headers, read_file(args)) + 
post("/api/v1/catalog/" + args.tag + "/packages/java/bulk", headers, read_file(args)) def subparser_packages_delete_java(subparser): sp = subparser.add_parser('delete', help='Delete java package from entity') @@ -3301,7 +3301,7 @@ def subparser_packages_delete_java(subparser): sp.set_defaults(func=packages_delete_java) def packages_delete_java(args): - delete("/api/v1/catalog/"+ args.tag + "/packages/java" + parse_opts(args)) + delete("/api/v1/catalog/" + args.tag + "/packages/java" + parse_opts(args)) def subparser_packages_python_opts(subparser): sp = subparser.add_subparsers(help='Python package sub-commands.') @@ -3318,7 +3318,7 @@ def subparser_packages_upload_python_pipfile(subparser): def packages_upload_python_pipfile(args): headers = default_headers() - post("/api/v1/catalog/"+ args.tag + "/packages/python/pipfile", headers, read_file(args)) + post("/api/v1/catalog/" + args.tag + "/packages/python/pipfile", headers, read_file(args)) def subparser_packages_upload_python_requirements(subparser): sp = subparser.add_parser('upload-requirements', help='Upload python requirements.txt file') @@ -3328,7 +3328,7 @@ def subparser_packages_upload_python_requirements(subparser): def packages_upload_python_requirements(args): headers = default_headers() - post("/api/v1/catalog/"+ args.tag + "/packages/python/requirements", headers, read_file(args)) + post("/api/v1/catalog/" + args.tag + "/packages/python/requirements", headers, read_file(args)) def subparser_packages_delete_python(subparser): sp = subparser.add_parser('delete', help='Delete python package from entity') @@ -3337,7 +3337,7 @@ def subparser_packages_delete_python(subparser): sp.set_defaults(func=packages_delete_python) def packages_delete_python(args): - delete("/api/v1/catalog/"+ args.tag + "/packages/python" + parse_opts(args)) + delete("/api/v1/catalog/" + args.tag + "/packages/python" + parse_opts(args)) def subparser_packages_node_opts(subparser): sp = subparser.add_subparsers(help='Node package 
sub-commands.') @@ -3355,7 +3355,7 @@ def subparser_packages_upload_node_package(subparser): def packages_upload_node_package(args): headers = default_headers() - post("/api/v1/catalog/"+ args.tag + "/packages/node/package-json", headers, read_file(args)) + post("/api/v1/catalog/" + args.tag + "/packages/node/package-json", headers, read_file(args)) def subparser_packages_upload_node_package_lock(subparser): sp = subparser.add_parser('upload-package-lock', help='Upload node package-lock.json file') @@ -3365,7 +3365,7 @@ def subparser_packages_upload_node_package_lock(subparser): def packages_upload_node_package_lock(args): headers = default_headers() - post("/api/v1/catalog/"+ args.tag + "/packages/node/package-lock", headers, read_file(args)) + post("/api/v1/catalog/" + args.tag + "/packages/node/package-lock", headers, read_file(args)) def subparser_packages_upload_node_yarn_lock(subparser): sp = subparser.add_parser('upload-yarn-lock', help='Upload yarn.lock file') @@ -3375,7 +3375,7 @@ def subparser_packages_upload_node_yarn_lock(subparser): def packages_upload_node_yarn_lock(args): headers = default_headers() - post("/api/v1/catalog/"+ args.tag + "/packages/node/yarn-lock", headers, read_file(args)) + post("/api/v1/catalog/" + args.tag + "/packages/node/yarn-lock", headers, read_file(args)) def subparser_packages_delete_node(subparser): sp = subparser.add_parser('delete', help='Delete node package from entity') @@ -3384,7 +3384,7 @@ def subparser_packages_delete_node(subparser): sp.set_defaults(func=packages_delete_node) def packages_delete_node(args): - delete("/api/v1/catalog/"+ args.tag + "/packages/node" + parse_opts(args)) + delete("/api/v1/catalog/" + args.tag + "/packages/node" + parse_opts(args)) def subparser_packages_nuget_opts(subparser): sp = subparser.add_subparsers(help='NuGet package sub-commands.') @@ -3401,7 +3401,7 @@ def subparser_packages_upload_nuget_csproj(subparser): def packages_upload_nuget_csproj(args): headers = default_headers() - 
post("/api/v1/catalog/"+ args.tag + "/packages/dotnet/nuget/csproj", headers, read_file(args)) + post("/api/v1/catalog/" + args.tag + "/packages/dotnet/nuget/csproj", headers, read_file(args)) def subparser_packages_upload_nuget_packages_lock(subparser): sp = subparser.add_parser('upload-packages-lock', help='Upload Nuget packages.lock.json') @@ -3411,7 +3411,7 @@ def subparser_packages_upload_nuget_packages_lock(subparser): def packages_upload_nuget_packages_lock(args): headers = default_headers() - post("/api/v1/catalog/"+ args.tag + "/packages/dotnet/nuget/packages-lock", headers, read_file(args)) + post("/api/v1/catalog/" + args.tag + "/packages/dotnet/nuget/packages-lock", headers, read_file(args)) def subparser_packages_delete_nuget(subparser): sp = subparser.add_parser('delete', help='Delete nuget package from entity') @@ -3420,7 +3420,7 @@ def subparser_packages_delete_nuget(subparser): sp.set_defaults(func=packages_delete_nuget) def packages_delete_nuget(args): - delete("/api/v1/catalog/"+ args.tag + "/packages/dotnet/nuget" + parse_opts(args)) + delete("/api/v1/catalog/" + args.tag + "/packages/dotnet/nuget" + parse_opts(args)) # Packages end # Plugins start @@ -3436,9 +3436,9 @@ def subparser_plugins_opts(subparsers): def subparser_plugins_create(subparser): sp = subparser.add_parser('create', - help='Create a new plugin', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + help='Create a new plugin', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -3493,9 +3493,9 @@ def plugins_get_by_tag(args): def subparser_plugins_update(subparser): sp = subparser.add_parser('update', - help='Create a new plugin', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + help='Create a new plugin', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of 
JSON-formatted configuration file: -------------------------------------------- { @@ -3535,8 +3535,8 @@ def subparser_queries_opts(subparsers): def subparser_queries_run(subparser): sp = subparser.add_parser('run', help='Run CQL query', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Query input can be provided either as a JSON-formatted file or in a file containing the query text that would be used in Query Builder in the Cortex UI. In the latter case, the text will be converted into the expected JSON format @@ -3571,46 +3571,46 @@ def subparser_queries_run(subparser): def queries_run(args): headers = default_headers() if hasattr(args, "wait"): - query_output = io.StringIO() - with redirect_stdout(query_output): - delattr(args, 'wait') - queries_run(args) - out = json.loads(query_output.getvalue()) - - jobId = out['jobId'] - sleep_interval = 2 - max_attempts = int(args.timeout)//sleep_interval - args.id = jobId - - done = False - for attempt in range(1, max_attempts): - query_check_output = io.StringIO() - with redirect_stdout(query_check_output): - queries_get(args) - out = json.loads(query_check_output.getvalue()) - status = out['status'] - if status == "DONE": - done = True - break - else: - if attempt == max_attempts: - break - time.sleep(sleep_interval) - - if not done: - print("failed to find job id " + jobId + " in DONE state within " + str(args.timeout) + " seconds") - print(str(out)) - sys.exit(2) - else: - print(str(json.dumps(out))) + query_output = io.StringIO() + with redirect_stdout(query_output): + delattr(args, 'wait') + queries_run(args) + out = json.loads(query_output.getvalue()) + + jobId = out['jobId'] + sleep_interval = 2 + max_attempts = int(args.timeout) // sleep_interval + args.id = jobId + + done = False + for attempt in range(1, max_attempts): + query_check_output = io.StringIO() + with redirect_stdout(query_check_output): + 
queries_get(args) + out = json.loads(query_check_output.getvalue()) + status = out['status'] + if status == "DONE": + done = True + break + else: + if attempt == max_attempts: + break + time.sleep(sleep_interval) + + if not done: + print("failed to find job id " + jobId + " in DONE state within " + str(args.timeout) + " seconds") + print(str(out)) + sys.exit(2) + else: + print(str(json.dumps(out))) else: - # Support input being in JSON format or bare CQL. - payload = read_file(args) - if payload[0] != "{": - data = {} - data['query'] = payload - payload = str(json.dumps(data)) - post("/api/v1/queries", headers, payload=payload) + # Support input being in JSON format or bare CQL. + payload = read_file(args) + if payload[0] != "{": + data = {} + data['query'] = payload + payload = str(json.dumps(data)) + post("/api/v1/queries", headers, payload=payload) def subparser_queries_get(subparser): sp = subparser.add_parser('get', help='Get results of a CQL query') @@ -3634,9 +3634,9 @@ def subparser_resource_definitions_opts(subparsers): def subparser_resource_definitions_create(subparser): sp = subparser.add_parser('create', - help='Create definition', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + help='Create definition', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { @@ -3825,10 +3825,10 @@ def subparser_teams_hierarchies_opts(subparsers): subparser_teams_hierarchies_relationships(sp) def subparser_teams_hierarchies_create(subparser): - sp = subparser.add_parser('create', - help='Create a department', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + sp = subparser.add_parser('create', + help='Create a department', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Format of JSON-formatted configuration file: -------------------------------------------- { 
@@ -3964,17 +3964,17 @@ def teams_update_members(args): # if args comes in from sys.argv. def cli(argv=sys.argv[1:]): parser = argparse.ArgumentParser( - prog='cortex CLI', - description='Cortex command line interface', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ + prog='cortex CLI', + description='Cortex command line interface', + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent('''\ Type 'man cortex' for additional details. ''')) parser.add_argument('-c', '--config', help='Config location, default = ~/.cortex/config', default=os.path.expanduser('~') + '/.cortex/config') parser.add_argument('-d', '--debug', help='Writes request debug information as JSON to stderr', action='store_true') parser.add_argument('-n', '--noObfuscate', help='Do not obfuscate bearer token when debugging', action='store_true') parser.add_argument('-q', '--quiet', help='Suppress warning messages when overriding tenant settings with environment variables', action='store_true') - parser.add_argument('-t', '--tenant', default='default', help='tenant name defined in ~/.cortex/config, defaults to \'default\'',metavar='') + parser.add_argument('-t', '--tenant', default='default', help='tenant name defined in ~/.cortex/config, defaults to \'default\'', metavar='') parser.add_argument('-v', '--version', action='version', version=version()) sp = parser.add_subparsers(help='sub-command help') @@ -4003,7 +4003,7 @@ def cli(argv=sys.argv[1:]): replace_string = "REPLACE_WITH_YOUR_CORTEX_API_KEY" validate_input(argv, parser) args = parser.parse_args(argv) - #args = get_config(config, args, argv, parser, replace_string) + # args = get_config(config, args, argv, parser, replace_string) get_config(config, args, argv, parser, replace_string) args.func(args) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py new file mode 100644 index 0000000..dbaf784 --- /dev/null +++ b/cortexapps_cli/cortex_client.py @@ -0,0 +1,55 @@ 
+import requests +import json +import typer +from rich import print + +class CortexClient: + def __init__(self, api_key, base_url='https://api.getcortexapp.com'): + self.api_key = api_key + self.base_url = base_url + + def request(self, method, endpoint, params={}, headers={}, data=None, raw=False): + req_headers = { + 'Authorization': f'Bearer {self.api_key}', + 'Content-Type': 'application/json', + **headers + } + url = '/'.join([self.base_url.rstrip('/'), endpoint.lstrip('/')]) + response = requests.request(method, url, params=params, headers=req_headers, json=data) + + if not response.ok: + try: + error = response.json() + status = response.status_code + message = error.get('message', 'Unknown error') + details = error.get('details', 'No details') + request_id = error.get('requestId', 'No request ID') + error_str = f'[red][bold]HTTP Error {status}[/bold][/red]: {message} - {details} [dim](Request ID: {request_id})[/dim]' + print(error_str) + raise typer.Exit(code=1) + except json.JSONDecodeError: + response.raise_for_status() + if raw: + return response + + try: + return response.json() + except json.JSONDecodeError: + if isinstance(response.text, str): + return response.text + elif isinstance(response.content, bytes): + return response.content + else: + return None + + def get(self, endpoint, params={}, headers={}, raw=False): + return self.request('GET', endpoint, params=params, headers=headers, raw=raw) + + def post(self, endpoint, data={}, headers={}, raw=False): + return self.request('POST', endpoint, data=data, headers=headers, raw=raw) + + def put(self, endpoint, data={}, headers={}, raw=False): + return self.request('PUT', endpoint, data=data, headers=headers, raw=raw) + + def delete(self, endpoint, headers={}, raw=False): + return self.request('DELETE', endpoint, headers=headers, raw=raw) diff --git a/cortexapps_cli/models/team.py b/cortexapps_cli/models/team.py new file mode 100644 index 0000000..5fdf8f3 --- /dev/null +++ 
b/cortexapps_cli/models/team.py @@ -0,0 +1,202 @@ +import json +from enum import Enum +from typing import List, Optional, Union + +class TeamType(Enum): + CORTEX = "CORTEX" + IDP = "IDP" + +class Team: + class Metadata: + def __init__(self, name: str, description: Optional[str] = None, summary: Optional[str] = None): + self.name = name + self.description = description + self.summary = summary + + @classmethod + def from_obj(cls, obj): + return cls( + name=obj['name'], + description=obj.get('description'), + summary=obj.get('summary') + ) + + def to_obj(self): + return { + 'name': self.name, + 'description': self.description, + 'summary': self.summary + } + + class Link: + def __init__(self, description: str, name: str, type: str, url: str): + self.description = description + self.name = name + self.type = type + self.url = url + + @classmethod + def from_obj(cls, obj): + return cls(**obj) + + def to_obj(self): + return vars(self) + + class SlackChannel: + def __init__(self, description: str, name: str, notificationsEnabled: bool): + self.description = description + self.name = name + self.notificationsEnabled = notificationsEnabled + + @classmethod + def from_obj(cls, obj): + return cls(**obj) + + def to_obj(self): + return vars(self) + + class CortexMember: + def __init__(self, email: str, name: str, description: Optional[str] = None, role: Optional[str] = None, notificationsEnabled: bool = True): + self.email = email + self.name = name + self.description = description + self.role = role + self.notificationsEnabled = notificationsEnabled + + @classmethod + def from_obj(cls, obj): + return cls(**obj) + + def to_obj(self): + return vars(self) + + class CortexTeam: + def __init__(self, members: List['Team.CortexMember']): + self.members = members + + @classmethod + def from_obj(cls, obj): + return cls(members=[Team.CortexMember.from_obj(member) for member in obj['members']]) + + def to_obj(self): + return {'members': [member.to_obj() for member in self.members]} + + 
class IdpGroup: + def __init__(self, group: str, provider: str): + self.group = group + self.provider = provider + + @classmethod + def from_obj(cls, obj): + return cls(**obj) + + def to_obj(self): + return vars(self) + + + def __init__(self, + teamTag: str, + metadata_name: str, + type: TeamType, + id: Optional[str] = None, + links: Optional[List[Link]] = None, + slackChannels: Optional[List[SlackChannel]] = None, + cortexTeam: Optional['Team.CortexTeam'] = None, + idpGroup: Optional['Team.IdpGroup'] = None, + catalogEntityTag: Optional[str] = None, + metadata_description: Optional[str] = None, + metadata_summary: Optional[str] = None, + isArchived: bool = False + ): + + if type == TeamType.CORTEX and cortexTeam is None: + raise ValueError("cortexTeam.members must exist if type is 'CORTEX'") + + if type == TeamType.IDP and idpGroup is None: + raise ValueError("idpGroup must exist if type is 'IDP'") + + self.id = id + self.teamTag = teamTag + self.catalogEntityTag = catalogEntityTag or teamTag + self.metadata = self.Metadata(metadata_name, metadata_description, metadata_summary) + self.links = links or [] + self.slackChannels = slackChannels or [] + self.isArchived = isArchived + self.cortexTeam = cortexTeam + self.idpGroup = idpGroup + self.type = type + + @classmethod + def from_json(cls, data: Union[str, dict]): + if isinstance(data, str): + data = json.loads(data) + return cls.from_obj(data) + + def to_json(self): + return json.dumps(self.to_obj(), indent=4) + + @classmethod + def from_obj(cls, obj: dict): + type_enum = TeamType(obj['type']) + + cortex_team = None + idp_group = None + + if type_enum == TeamType.CORTEX: + cortex_team = cls.CortexTeam.from_obj(obj['cortexTeam']) + elif type_enum == TeamType.IDP: + idp_group = cls.IdpGroup.from_obj(obj['idpGroup']) + + links = [cls.Link.from_obj(link) for link in obj.get('links', [])] + slack_channels = [cls.SlackChannel.from_obj(channel) for channel in obj.get('slackChannels', [])] + + return cls( + 
teamTag=obj['teamTag'], + metadata_name=obj['metadata']['name'], + type=type_enum, + links=links, + slackChannels=slack_channels, + cortexTeam=cortex_team, + idpGroup=idp_group, + id=obj.get('id'), + catalogEntityTag=obj.get('catalogEntityTag'), + metadata_description=obj['metadata'].get('description'), + metadata_summary=obj['metadata'].get('summary'), + isArchived=obj.get('isArchived', False), + ) + + def to_obj(self): + data = { + "id": self.id, + "teamTag": self.teamTag, + "catalogEntityTag": self.catalogEntityTag, + "metadata": self.metadata.to_obj(), + "links": [link.to_obj() for link in self.links], + "slackChannels": [channel.to_obj() for channel in self.slackChannels], + "isArchived": self.isArchived, + "type": self.type.value + } + + if self.type == TeamType.CORTEX and self.cortexTeam: + data["cortexTeam"] = self.cortexTeam.to_obj() + + if self.type == TeamType.IDP and self.idpGroup: + data["idpGroup"] = self.idpGroup.to_obj() + + return data + + +def main(): + # Creating manually + team_manual = Team( + teamTag="retail2-partner-experience", + metadata_name="Retail2 Partner Experience", + type=TeamType.CORTEX, + links=[], + slackChannels=[], + cortexTeam=Team.CortexTeam(members=[]) + ) + print(json.dumps(team_manual.to_obj(), indent=4)) + +if (__name__ == "__main__"): + main() diff --git a/poetry.lock b/poetry.lock index 4095299..6890b5e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -110,6 +110,20 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + 
+[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" @@ -221,6 +235,41 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "packaging" version = "24.1" @@ -247,6 +296,20 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting 
package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pytest" version = "8.2.2" @@ -405,6 +468,63 @@ urllib3 = ">=1.25.10,<3.0" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] +[[package]] +name = "rich" +version = "13.8.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, + {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "typer" +version = "0.12.5" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"}, + {file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + [[package]] name = "urllib3" version = "2.2.2" @@ -425,4 +545,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "351296b2fe9693563eb66b5c7ceb8d7b0e86cd3efb405418303dd923b7ed1bf7" +content-hash = "11dc2395310eafb764eefef1ed8366521219265f9820bb2c19be9ad210137ef0" diff --git a/pyproject.toml b/pyproject.toml index 6e3ed38..5e4b047 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,9 +20,11 @@ python = "^3.11" requests = ">= 2.32.3, < 3" pyyaml = ">= 6.0.1, < 7" urllib3 = ">= 2.2.2" +typer = "^0.12.5" [tool.poetry.scripts] cortex = "cortexapps_cli.cortex:cli" +cortex2 = "cortexapps_cli.cli:app" [tool.poetry.group.test.dependencies] pytest = "^8.2.2" From 2624db29d32b6009a08a215144bca72eca02f609 Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Thu, 19 Sep 2024 13:59:56 -0400 Subject: [PATCH 02/56] add paginated fetch to client, start on catalog commands --- cortexapps_cli/cli.py | 21 +++++----- cortexapps_cli/commands/catalog.py | 59 +++++++++++++++++++++++++++++ cortexapps_cli/commands/teams.py | 8 ++-- cortexapps_cli/cortex_client.py 
| 61 +++++++++++++++++++++++++++++- cortexapps_cli/models/team.py | 1 - 5 files changed, 133 insertions(+), 17 deletions(-) create mode 100644 cortexapps_cli/commands/catalog.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 93d8632..cfb309d 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -8,23 +8,25 @@ import configparser from cortexapps_cli.cortex_client import CortexClient + import cortexapps_cli.commands.teams as teams +import cortexapps_cli.commands.catalog as catalog app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]}) # add subcommands app.add_typer(teams.app, name="teams") +app.add_typer(catalog.app, name="catalog") # global options @app.callback() -def global_callback(ctx: typer.Context, - # verbose: bool = typer.Option(False, "--verbose", "-v", help="Enable verbose mode"), - api_key: str = typer.Option(None, "--api-key", "-k", help="API key", envvar="CORTEX_API_KEY"), - url: str = typer.Option("https://api.getcortexapp.com", "--url", "-u", help="Base URL for the API", envvar="CORTEX_BASE_URL"), - config_file: str = typer.Option(os.path.join(os.path.expanduser('~'), '.cortex', 'config'), "--config", "-c", help="Config file path", envvar="CORTEX_CONFIG"), - tenant: str = typer.Option("default", "--tenant", "-t", help="Tenant alias", envvar="CORTEX_TENANT_ALIAS"), - ): - +def global_callback( + ctx: typer.Context, + api_key: str = typer.Option(None, "--api-key", "-k", help="API key", envvar="CORTEX_API_KEY"), + url: str = typer.Option("https://api.getcortexapp.com", "--url", "-u", help="Base URL for the API", envvar="CORTEX_BASE_URL"), + config_file: str = typer.Option(os.path.join(os.path.expanduser('~'), '.cortex', 'config'), "--config", "-c", help="Config file path", envvar="CORTEX_CONFIG"), + tenant: str = typer.Option("default", "--tenant", "-t", help="Tenant alias", envvar="CORTEX_TENANT_ALIAS"), +): if not ctx.obj: ctx.obj = {} @@ -59,9 +61,6 @@ def global_callback(ctx: typer.Context, api_key 
= api_key.strip('"\' ') url = url.strip('"\' /') - # ctx.obj["verbose"] = verbose - ctx.obj["api_key"] = api_key - ctx.obj["base_url"] = url ctx.obj["client"] = CortexClient(api_key, url) @app.command() diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py new file mode 100644 index 0000000..cb1e25c --- /dev/null +++ b/cortexapps_cli/commands/catalog.py @@ -0,0 +1,59 @@ +import typer + +from rich import print_json + +app = typer.Typer() + +@app.command() +def list( + ctx: typer.Context, + include_archived: bool = typer.Option(False, "--include-archived", "-a", help="Include archived entities"), + hierarchy_depth: str = typer.Option('full', "--hierarchy-depth", "-d", help="Depth of the parent / children hierarchy nodes. Can be 'full' or a valid integer"), + groups: str = typer.Option(None, "--groups", "-g", help="Filter based on groups, which correspond to the x-cortex-groups field in the Catalog Descriptor. Accepts a comma-delimited list of groups"), + owners: str = typer.Option(None, "--owners", "-o", help="Filter based on owner group names, which correspond to the x-cortex-owners field in the Catalog Descriptor. Accepts a comma-delimited list of owner group names"), + include_hierarchy_fields: str = typer.Option(None, "--include-hierarchy-fields", "-i", help="List of sub fields to include for hierarchies. 
Only supports 'groups'"), + include_nested_fields: str = typer.Option(None, "--include-nested-fields", "-in", help="List of sub fields to include for different types, for example team:members"), + include_owners: bool = typer.Option(False, "--include-owners", "-io", help="Include ownership information for each entity in the response"), + include_links: bool = typer.Option(False, "--include-links", "-l", help="Include links for each entity in the response"), + include_metadata: bool = typer.Option(False, "--include-metadata", "-m", help="Include custom data for each entity in the response"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + git_repositories: str = typer.Option(None, "--git-repositories", "-r", help="Supports only GitHub repositories in the org/repo format"), + types: str = typer.Option(None, "--types", "-t", help="Filter the response to specific types of entities. By default, this includes services, resources, and domains. 
Corresponds to the x-cortex-type field in the Entity Descriptor."), +): + client = ctx.obj["client"] + + params = { + "includeArchived": include_archived, + "hierarchyDepth": hierarchy_depth, + "groups": groups, + "owners": owners, + "includeHierarchyFields": include_hierarchy_fields, + "includeNestedFields": include_nested_fields, + "includeOwners": include_owners, + "includeLinks": include_links, + "includeMetadata": include_metadata, + "page": page, + "pageSize": page_size, + "gitRepositories": git_repositories, + "types": types, + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + # for keys that can have multiple values, remove whitespace around comma and split on comma + for key in ['groups', 'owners', 'gitRepositories', 'types']: + if key in params: + params[key] = [x.strip() for x in params[key].split(',')] + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog", params=params) + pass + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog", params=params) + pass + + print_json(data=r) diff --git a/cortexapps_cli/commands/teams.py b/cortexapps_cli/commands/teams.py index 09930fa..c852a9f 100644 --- a/cortexapps_cli/commands/teams.py +++ b/cortexapps_cli/commands/teams.py @@ -99,7 +99,7 @@ def get( Get a team """ client = ctx.obj["client"] - r = client.get(f"api/v1/teams/{team_tag}") + r = client.get_entity(team_tag, 'team') print_json(json.dumps(r)) @app.command() @@ -111,7 +111,7 @@ def delete( Delete a team """ client = ctx.obj["client"] - client.delete(f"api/v1/teams/{team_tag}") + client.delete_entity(team_tag, 'team') @app.command() def archive( @@ -122,7 +122,7 @@ def archive( Archive a team """ client = ctx.obj["client"] - r = client.put(f"api/v1/teams/{team_tag}/archive") + r = client.archive_entity(team_tag, 'team') print_json(json.dumps(r)) @app.command() @@ -134,7 +134,7 @@ def unarchive( 
Unarchive a team """ client = ctx.obj["client"] - r = client.put(f"api/v1/teams/{team_tag}/unarchive") + r = client.unarchive_entity(team_tag, 'team') print_json(json.dumps(r)) @app.command() diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index dbaf784..0038b60 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -19,6 +19,7 @@ def request(self, method, endpoint, params={}, headers={}, data=None, raw=False) if not response.ok: try: + # try to parse the error message error = response.json() status = response.status_code message = error.get('message', 'Unknown error') @@ -28,10 +29,11 @@ def request(self, method, endpoint, params={}, headers={}, data=None, raw=False) print(error_str) raise typer.Exit(code=1) except json.JSONDecodeError: + # if we can't parse the error message, just raise the HTTP error response.raise_for_status() if raw: return response - + try: return response.json() except json.JSONDecodeError: @@ -53,3 +55,60 @@ def put(self, endpoint, data={}, headers={}, raw=False): def delete(self, endpoint, headers={}, raw=False): return self.request('DELETE', endpoint, headers=headers, raw=raw) + + def fetch(self, endpoint, params={}, headers={}): + # do paginated fetch, page number is indexed at 0 + # param page is page number, param pageSize is page size, default 250 + page = 0 + page_size = 250 + entities = [] + while True: + response = self.get(endpoint, params={**params, 'page': page, 'pageSize': page_size}, headers=headers) + if 'entities' not in response or not response['entities']: + break + entities.extend(response['entities']) + if response['totalPages'] == page + 1: + break + page += 1 + return { + "total": len(entities), + "page": 0, + "totalPages": 1 if entities else 0, + "entities": entities, + } + + def get_entity(self, entity_tag: str, entity_type: str = ''): + match entity_type.lower(): + case 'team' | 'teams': + path_for_type = 'teams' + case _: + path_for_type = 'catalog' + + 
return self.get(f'api/v1/{path_for_type}/{entity_tag}') + + def delete_entity(self, entity_tag: str, entity_type: str = ''): + match entity_type.lower(): + case 'team' | 'teams': + path_for_type = 'teams' + case _: + path_for_type = 'catalog' + + return self.delete(f'api/v1/{path_for_type}/{entity_tag}') + + def archive_entity(self, entity_tag: str, entity_type: str = ''): + match entity_type.lower(): + case 'team' | 'teams': + path_for_type = 'teams' + case _: + path_for_type = 'catalog' + + return self.put(f'api/v1/{path_for_type}/{entity_tag}/archive') + + def unarchive_entity(self, entity_tag: str, entity_type: str = ''): + match entity_type.lower(): + case 'team' | 'teams': + path_for_type = 'teams' + case _: + path_for_type = 'catalog' + + return self.put(f'api/v1/{path_for_type}/{entity_tag}/unarchive') diff --git a/cortexapps_cli/models/team.py b/cortexapps_cli/models/team.py index 5fdf8f3..778edba 100644 --- a/cortexapps_cli/models/team.py +++ b/cortexapps_cli/models/team.py @@ -185,7 +185,6 @@ def to_obj(self): return data - def main(): # Creating manually team_manual = Team( From cd0aaa98bd7daad1f228c0af16818df35ff0ca82 Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Fri, 20 Sep 2024 14:50:22 -0400 Subject: [PATCH 03/56] fetch for different endpoint response shapes --- cortexapps_cli/cortex_client.py | 37 ++++++++++++++++++++++----------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index 0038b60..c025393 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -8,6 +8,18 @@ def __init__(self, api_key, base_url='https://api.getcortexapp.com'): self.api_key = api_key self.base_url = base_url + def data_key_for_endpoint(self, endpoint): + end_endpoint = endpoint.split('/')[-1] + match end_endpoint: + case 'catalog': + return 'entities' + case 'audit-logs': + return 'logs' + case 'deploys': + return 'deployments' + case _: + return end_endpoint 
+ def request(self, method, endpoint, params={}, headers={}, data=None, raw=False): req_headers = { 'Authorization': f'Bearer {self.api_key}', @@ -47,34 +59,35 @@ def request(self, method, endpoint, params={}, headers={}, data=None, raw=False) def get(self, endpoint, params={}, headers={}, raw=False): return self.request('GET', endpoint, params=params, headers=headers, raw=raw) - def post(self, endpoint, data={}, headers={}, raw=False): - return self.request('POST', endpoint, data=data, headers=headers, raw=raw) + def post(self, endpoint, data={}, params={}, headers={}, raw=False): + return self.request('POST', endpoint, data=data, params=params, headers=headers, raw=raw) - def put(self, endpoint, data={}, headers={}, raw=False): - return self.request('PUT', endpoint, data=data, headers=headers, raw=raw) + def put(self, endpoint, data={}, params={}, headers={}, raw=False): + return self.request('PUT', endpoint, data=data, params=params, headers=headers, raw=raw) - def delete(self, endpoint, headers={}, raw=False): - return self.request('DELETE', endpoint, headers=headers, raw=raw) + def delete(self, endpoint, params={}, headers={}, raw=False): + return self.request('DELETE', endpoint, params=params, headers=headers, raw=raw) def fetch(self, endpoint, params={}, headers={}): # do paginated fetch, page number is indexed at 0 # param page is page number, param pageSize is page size, default 250 page = 0 page_size = 250 - entities = [] + data_key = self.data_key_for_endpoint(endpoint) + data = [] while True: response = self.get(endpoint, params={**params, 'page': page, 'pageSize': page_size}, headers=headers) - if 'entities' not in response or not response['entities']: + if data_key not in response or not response[data_key]: break - entities.extend(response['entities']) + data.extend(response[data_key]) if response['totalPages'] == page + 1: break page += 1 return { - "total": len(entities), + "total": len(data), "page": 0, - "totalPages": 1 if entities else 0, - 
"entities": entities, + "totalPages": 1 if data else 0, + data_key: data, } def get_entity(self, entity_tag: str, entity_type: str = ''): From 72015839dea9067f5b31fe0176893110d3f527ab Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 20 Sep 2024 14:30:23 -0700 Subject: [PATCH 04/56] first pass at audit-logs --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/audit_logs.py | 175 ++++++++++++++++++++++++++ 2 files changed, 177 insertions(+) create mode 100644 cortexapps_cli/commands/audit_logs.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index cfb309d..4ef4ba7 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -11,12 +11,14 @@ import cortexapps_cli.commands.teams as teams import cortexapps_cli.commands.catalog as catalog +import cortexapps_cli.commands.audit_logs as audit_logs app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]}) # add subcommands app.add_typer(teams.app, name="teams") app.add_typer(catalog.app, name="catalog") +app.add_typer(audit_logs.app, name="audit-logs") # global options @app.callback() diff --git a/cortexapps_cli/commands/audit_logs.py b/cortexapps_cli/commands/audit_logs.py new file mode 100644 index 0000000..2efe2aa --- /dev/null +++ b/cortexapps_cli/commands/audit_logs.py @@ -0,0 +1,175 @@ +from datetime import datetime +from enum import Enum +import typer + +from rich import print_json + +app = typer.Typer() + +class Action(str, Enum): + CREATE = "CREATE" + DELETE = "DELETE" + UPDATE = "UPDATE" + +class ActorType(str, Enum): + ANONYMOUS = "ANONYMOUS" + API_KEY = "API_KEY" + BACKSTAGE = "BACKSTAGE" + OAUTH2 = "OAUTH2" + PERSONAL_API_KEY = "PERSONAL_API_KEY" + +class ActorRequestType(str, Enum): + API_KEY_ENTITY = "API_KEY_ENTITY" + ATLASSIAN_WEBHOOK = "ATLASSIAN_WEBHOOK" + SCORECARD_BADGES = "SCORECARD_BADGES" + SLACK_COMMAND = "SLACK_COMMAND" + +# Defined in web/src/main/kotlin/com/brainera/web/audit/models/AuditedEntity.kt +# +# Not sure if we should enumerate this or have 
user explicitly list the items +# they want. User would have to inpsect existing elements to know what the +# possible values are. +# +# Maintenance will be a problem if we use enum. +class ObjectType(str, Enum): + ACCOUNT_FLAG = "ACCOUNT_FLAG" + ACTIVE_DIRECTORY_CONFIGURATION = "ACTIVE_DIRECTORY_CONFIGURATION" + ALLOW_LIST_ENTRY = "ALLOW_LIST_ENTRY" + API_KEY = "API_KEY" + ATLASSIAN_CONFIGURATION = "ATLASSIAN_CONFIGURATION" + AWS_CONFIGURATION = "AWS_CONFIGURATION" + AZURE_DEVOPS_CONFIGURATION = "AZURE_DEVOPS_CONFIGURATION" + AZURE_RESOURCES_CONFIGURATION = "AZURE_RESOURCES_CONFIGURATION" + BAMBOO_HR_CONFIGURATION = "BAMBOO_HR_CONFIGURATION" + BITBUCKET_CONFIGURATION = "BITBUCKET_CONFIGURATION" + BITBUCKET_OAUTH_CONFIGURATION = "BITBUCKET_OAUTH_CONFIGURATION" + BITBUCKET_OAUTH_REGISTRATION = "BITBUCKET_OAUTH_REGISTRATION" + BITBUCKET_ONPREM_CONFIGURATION = "BITBUCKET_ONPREM_CONFIGURATION" + BITBUCKET_ONPREM_WEBHOOK_SECRET = "BITBUCKET_ONPREM_WEBHOOK_SECRET" + BITBUCKET_PERSONAL_CONFIGURATION = "BITBUCKET_PERSONAL_CONFIGURATION" + BUGSNAG_CONFIGURATION = "BUGSNAG_CONFIGURATION" + BUILDKITE_CONFIGURATION = "BUILDKITE_CONFIGURATION" + CATALOG = "CATALOG" + CATALOG_FILTER = "CATALOG_FILTER" + CHECKMARX_SAST_CONFIGURATION = "CHECKMARX_SAST_CONFIGURATION" + CIRCLE_CI_CONFIGURATION = "CIRCLE_CI_CONFIGURATION" + CLICKUP_CONFIGURATION = "CLICKUP_CONFIGURATION" + CODECOV_CONFIGURATION = "CODECOV_CONFIGURATION" + CORALOGIX_CONFIGURATION = "CORALOGIX_CONFIGURATION" + CORTEX_TEAM_ROLES = "CORTEX_TEAM_ROLES" + CORTEX_USER = "CORTEX_USER" + CORTEX_USER_ROLES = "CORTEX_USER_ROLES" + CUSTOM_DATA = "CUSTOM_DATA" + CUSTOM_METRICS_CONFIGURATION = "CUSTOM_METRICS_CONFIGURATION" + CUSTOM_ROLE = "CUSTOM_ROLE" + DATADOG_CONFIGURATION = "DATADOG_CONFIGURATION" + DOMAIN = "DOMAIN" + DYNATRACE_CONFIGURATION = "DYNATRACE_CONFIGURATION" + ENTITY_TYPE_DEFINITION = "ENTITY_TYPE_DEFINITION" + ENTITY_VERIFICATION = "ENTITY_VERIFICATION" + FIREHYDRANT_CONFIGURATION = "FIREHYDRANT_CONFIGURATION" + 
GITHUB_APP_CONFIGURATION = "GITHUB_APP_CONFIGURATION" + GITHUB_APP_INSTALLATION = "GITHUB_APP_INSTALLATION" + GITHUB_PERSONAL_TOKEN = "GITHUB_PERSONAL_TOKEN" + GITHUB_WEBHOOK_SECRET = "GITHUB_WEBHOOK_SECRET" + GITLAB_CONFIGURATION = "GITLAB_CONFIGURATION" + GOOGLE_CONFIGURATION = "GOOGLE_CONFIGURATION" + INCIDENT_IO_CONFIGURATION = "INCIDENT_IO_CONFIGURATION" + INITIATIVE = "INITIATIVE" + INSTANA_CONFIGURATION = "INSTANA_CONFIGURATION" + JIRA_BASIC_CONFIGURATION = "JIRA_BASIC_CONFIGURATION" + JIRA_CONFIGURATION = "JIRA_CONFIGURATION" + JIRA_OAUTH_CONFIGURATION = "JIRA_OAUTH_CONFIGURATION" + JIRA_OAUTH_REGISTRATION = "JIRA_OAUTH_REGISTRATION" + JIRA_ONPREM_CONFIGURATION = "JIRA_ONPREM_CONFIGURATION" + LAUNCHDARKLY_CONFIGURATION = "LAUNCHDARKLY_CONFIGURATION" + LIGHTSTEP_CONFIGURATION = "LIGHTSTEP_CONFIGURATION" + MEND_SAST_CONFIGURATION = "MEND_SAST_CONFIGURATION" + MEND_SCA_CONFIGURATION = "MEND_SCA_CONFIGURATION" + MICROSOFT_TEAMS_CONFIGURATION = "MICROSOFT_TEAMS_CONFIGURATION" + NEWRELIC_CONFIGURATION = "NEWRELIC_CONFIGURATION" + OAUTH_CONFIGURATION = "OAUTH_CONFIGURATION" + OKTA_CONFIGURATION = "OKTA_CONFIGURATION" + OPENAPI_DEFINITION = "OPENAPI_DEFINITION" + OPSGENIE_CONFIGURATION = "OPSGENIE_CONFIGURATION" + PAGERDUTY_CONFIGURATION = "PAGERDUTY_CONFIGURATION" + PERSONAL_API_KEY = "PERSONAL_API_KEY" + PROMETHEUS_CONFIGURATION = "PROMETHEUS_CONFIGURATION" + RESOURCE = "RESOURCE" + ROLLBAR_CONFIGURATION = "ROLLBAR_CONFIGURATION" + SCORECARD = "SCORECARD" + SCORECARD_FILTER = "SCORECARD_FILTER" + SCORECARD_RULE = "SCORECARD_RULE" + SCORECARD_RULE_FILTER = "SCORECARD_RULE_FILTER" + SECRET = "SECRET" + SECRET_GROUP = "SECRET_GROUP" + SENTRY_CONFIGURATION = "SENTRY_CONFIGURATION" + SERVICE = "SERVICE" + SERVICENOW_CONFIGURATION = "SERVICENOW_CONFIGURATION" + SIGNALFX_CONFIGURATION = "SIGNALFX_CONFIGURATION" + SLACK_CONFIGURATION = "SLACK_CONFIGURATION" + SNYK_CONFIGURATION = "SNYK_CONFIGURATION" + SONARQUBE_CONFIGURATION = "SONARQUBE_CONFIGURATION" + 
SUMOLOGIC_CONFIGURATION = "SUMOLOGIC_CONFIGURATION" + TEAM = "TEAM" + VERACODE_CONFIGURATION = "VERACODE_CONFIGURATION" + VERIFICATION_PERIOD = "VERIFICATION_PERIOD" + VICTOROPS_CONFIGURATION = "VICTOROPS_CONFIGURATION" + WIZ_CONFIGURATION = "WIZ_CONFIGURATION" + WORKDAY_CONFIGURATION = "WORKDAY_CONFIGURATION" + WORKFLOW = "WORKFLOW" + XMATTERS_CONFIGURATION = "XMATTERS_CONFIGURATION" + +@app.command() +def get( + ctx: typer.Context, + actions: list[Action] | None = typer.Option(None, "--actions", "-a", help="The audit action"), + actorApiKeyIdentifiers: list[str] | None = typer.Option(None, "--actorApiKeyIdentifiers", "-ak", help="API key name associated with audit event"), + actorEmails: list[str] | None = typer.Option(None, "--actorEmails", "-ae", help="email address associated with audit event"), + actorIpAddresses: list[str] | None = typer.Option(None, "--actorIpAddresses", "-ai", help="source IP Addresses associated with audit event"), + actorRequestTypes: list[ActorRequestType] | None = typer.Option(None, "--actorRequestTypes", "-ar", help="request event associated with audit event"), + actorTypes: list[ActorType] | None = typer.Option(None, "--actorTypes", "-at", help="actor that triggered the audit event"), + end_time: datetime = typer.Option(None, "--endTime", "-e", help="End time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + objectIdentifiers: list[str] | None = typer.Option(None, "--objectIdentifiers", "-oi", help="The name of the Cortex object that was modified, ie x-cortex-tag value, metadata field name, etc."), + objectTypes: list[ObjectType] | None = typer.Option(None, "--objectTypes", "-ot", help="ObjectTypes"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + start_time: datetime = typer.Option(None, "--startTime", "-s", help="Start time of 
audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), +): + client = ctx.obj["client"] + + params = { + "actions": actions, + "actorApiKeyIdentifiers": actorApiKeyIdentifiers, + "actorEmails": actorEmails, + "actorIpAddresses": actorIpAddresses, + "actorRequestTypes": actorRequestTypes, + "actorTypes": actorTypes, + "endTime": end_time, + "objectIdentifiers": objectIdentifiers, + "objectTypes": objectTypes, + "page": page, + "pageSize": page_size, + "startTime": start_time + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + # convert datetime and list types to string + for k, v in params.items(): + if str(type(v)) == "<class 'datetime.datetime'>": + params[k] = v.strftime('%Y-%m-%dT%H:%M:%S') + if str(type(v)) == "<class 'list'>": + params[k] = ','.join(v) + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/audit-logs", params=params) + pass + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/audit-logs", params=params) + pass + + print_json(data=r) From 915c9ab699afdc82f80563c3390fe3bd2cdc3b39 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 20 Sep 2024 16:44:28 -0700 Subject: [PATCH 05/56] Add custom-data commands --- cortexapps_cli/cli.py | 10 +- cortexapps_cli/commands/audit_logs.py | 11 +- cortexapps_cli/commands/custom_data.py | 140 +++++++++++++++++++++++++ cortexapps_cli/cortex_client.py | 13 ++- 4 files changed, 163 insertions(+), 11 deletions(-) create mode 100644 cortexapps_cli/commands/custom_data.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 4ef4ba7..11edb98 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -9,16 +9,18 @@ from cortexapps_cli.cortex_client import CortexClient -import cortexapps_cli.commands.teams as teams -import cortexapps_cli.commands.catalog as catalog import cortexapps_cli.commands.audit_logs as audit_logs +import cortexapps_cli.commands.catalog as catalog 
+import cortexapps_cli.commands.custom_data as custom_data +import cortexapps_cli.commands.teams as teams app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]}) # add subcommands -app.add_typer(teams.app, name="teams") -app.add_typer(catalog.app, name="catalog") app.add_typer(audit_logs.app, name="audit-logs") +app.add_typer(catalog.app, name="catalog") +app.add_typer(custom_data.app, name="custom-data") +app.add_typer(teams.app, name="teams") # global options @app.callback() diff --git a/cortexapps_cli/commands/audit_logs.py b/cortexapps_cli/commands/audit_logs.py index 2efe2aa..52d4654 100644 --- a/cortexapps_cli/commands/audit_logs.py +++ b/cortexapps_cli/commands/audit_logs.py @@ -125,10 +125,10 @@ def get( ctx: typer.Context, actions: list[Action] | None = typer.Option(None, "--actions", "-a", help="The audit action"), actorApiKeyIdentifiers: list[str] | None = typer.Option(None, "--actorApiKeyIdentifiers", "-ak", help="API key name associated with audit event"), - actorEmails: list[str] | None = typer.Option(None, "--actorEmails", "-ae", help="email address associated with audit event"), - actorIpAddresses: list[str] | None = typer.Option(None, "--actorIpAddresses", "-ai", help="source IP Addresses associated with audit event"), - actorRequestTypes: list[ActorRequestType] | None = typer.Option(None, "--actorRequestTypes", "-ar", help="request event associated with audit event"), - actorTypes: list[ActorType] | None = typer.Option(None, "--actorTypes", "-at", help="actor that triggered the audit event"), + actorEmails: list[str] | None = typer.Option(None, "--actorEmails", "-ae", help="Email address associated with audit event"), + actorIpAddresses: list[str] | None = typer.Option(None, "--actorIpAddresses", "-ai", help="Source IP Addresses associated with audit event"), + actorRequestTypes: list[ActorRequestType] | None = typer.Option(None, "--actorRequestTypes", "-ar", help="Request event associated with audit event"), + actorTypes: 
list[ActorType] | None = typer.Option(None, "--actorTypes", "-at", help="Actor that triggered the audit event"), end_time: datetime = typer.Option(None, "--endTime", "-e", help="End time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), objectIdentifiers: list[str] | None = typer.Option(None, "--objectIdentifiers", "-oi", help="The name of the Cortex object that was modified, ie x-cortex-tag value, metadata field name, etc."), objectTypes: list[ObjectType] | None = typer.Option(None, "--objectTypes", "-ot", help="ObjectTypes"), @@ -136,6 +136,9 @@ def get( page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), start_time: datetime = typer.Option(None, "--startTime", "-s", help="Start time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), ): + """ + Note: To see the complete list of possible values, please reference the available filter options for audit logs under Settings in the app. + """ client = ctx.obj["client"] params = { diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py new file mode 100644 index 0000000..80fdc46 --- /dev/null +++ b/cortexapps_cli/commands/custom_data.py @@ -0,0 +1,140 @@ +import json +import typer +from typing_extensions import Annotated + +from rich import print_json + +app = typer.Typer() + +@app.command() +def add( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing keys to update; can be passed as stdin with -, example: -f-")] = None, + force: bool = typer.Option(False, "--force", "-o", help="When true, overrides values that were defined in the catalog descriptor. 
Will be overwritten the next time the catalog descriptor is processed."), + key: str = typer.Option(None, "--key", "-k", help="The custom data key to create (only if file input not provided)."), + value: str = typer.Option(None, "--value", "-v", help="The value of the custom data key (only if file input not provided)."), + description: str = typer.Option(None, "--description", "-d", help="The description of the custom data key (only if file input not provided)."), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Add custom data for entity + """ + client = ctx.obj["client"] + + params = { + "description": description, + "force": force, + "key": key, + "tag": tag, + "value": value + } + + if file_input: + if description or key or value: + raise typer.BadParameter("When providing a custom input definition file, do not specify any other custom data attributes") + data = json.loads("".join([line for line in file_input])) + else: + if not value: + raise typer.BadParameter("value is required if custom data file is not provided") + if not key: + raise typer.BadParameter("key is required if custom data file is not provided") + + data = { + "key": key, + "value": value + } + + if description: + data["description"] = description + + r = client.post("api/v1/catalog/" + tag + "/custom-data", data=data, params=params) + print_json(json.dumps(r)) + +@app.command() +def bulk( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing keys to update; can be passed as stdin with -, example: -f-")] = None, + force: bool = typer.Option(False, "--force", "-o", help="When true, overrides values that were defined in the catalog descriptor. 
Will be overwritten the next time the catalog descriptor is processed."), +): + """ + Add multiple key/values of custom data to multiple entities + """ + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + params = { + "force": force + } + + r = client.put("api/v1/catalog/custom-data", data=data, params=params) + print_json(json.dumps(r)) + +@app.command() +def delete( + ctx: typer.Context, + force: bool = typer.Option(False, "--force", "-o", help="When true, overrides values that were defined in the catalog descriptor. Will be overwritten the next time the catalog descriptor is processed."), + key: str = typer.Option(..., "--key", "-k", help="The custom metadata key"), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Delete custom data for entity + """ + client = ctx.obj["client"] + + params = { + "force": force, + "key": key, + "tag": tag, + } + + r = client.delete("api/v1/catalog/" + tag + "/custom-data", params=params) + +@app.command() +def get( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + key: str = typer.Option(..., "--key", "-k", help="The custom metadata key"), +): + """ + Retrieve custom data for entity by key + """ + client = ctx.obj["client"] + + params = { + "key": key, + "tag": tag + } + + r = client.get("api/v1/catalog/" + tag + "/custom-data/" + key, params=params) + + print_json(data=r) + +@app.command() +def list( + ctx: typer.Context, + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + List custom 
data for entity + """ + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size, + "tag": tag + } + + if page is None: + # if page is not specified, we want to fetch all pages + # Not working: https://cortex1.atlassian.net/browse/CET-13655 + #r = client.fetch("api/v1/catalog/" + tag + "/custom-data", params=params) + pass + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/" + tag + "/custom-data", params=params) + + print_json(data=r) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index c025393..da96191 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -10,6 +10,7 @@ def __init__(self, api_key, base_url='https://api.getcortexapp.com'): def data_key_for_endpoint(self, endpoint): end_endpoint = endpoint.split('/')[-1] + print("ep = " + end_endpoint) match end_endpoint: case 'catalog': return 'entities' @@ -17,6 +18,8 @@ def data_key_for_endpoint(self, endpoint): return 'logs' case 'deploys': return 'deployments' + case 'custom-data': + return '' case _: return end_endpoint @@ -77,9 +80,13 @@ def fetch(self, endpoint, params={}, headers={}): data = [] while True: response = self.get(endpoint, params={**params, 'page': page, 'pageSize': page_size}, headers=headers) - if data_key not in response or not response[data_key]: - break - data.extend(response[data_key]) + # Some endpoints just return an array as the root element. + if data_key == '': + data.extend(response) + else: + if data_key not in response or not response[data_key]: + break + data.extend(response[data_key]) if response['totalPages'] == page + 1: break page += 1 From 765a2fd4f5f6667159dfc271ef8b7c42677a1339 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 20 Sep 2024 16:52:17 -0700 Subject: [PATCH 06/56] Add simple descriptions for each command. 
--- cortexapps_cli/cli.py | 4 ++++ cortexapps_cli/commands/audit_logs.py | 2 +- cortexapps_cli/commands/catalog.py | 2 +- cortexapps_cli/commands/custom_data.py | 2 +- cortexapps_cli/commands/teams.py | 2 +- 5 files changed, 8 insertions(+), 4 deletions(-) diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 11edb98..04e9925 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -69,6 +69,10 @@ def global_callback( @app.command() def version(): + + """ + Show the version and exit. + """ try: with open("pyproject.toml", "rb") as f: pyproject = tomllib.load(f) diff --git a/cortexapps_cli/commands/audit_logs.py b/cortexapps_cli/commands/audit_logs.py index 52d4654..1a0d86a 100644 --- a/cortexapps_cli/commands/audit_logs.py +++ b/cortexapps_cli/commands/audit_logs.py @@ -4,7 +4,7 @@ from rich import print_json -app = typer.Typer() +app = typer.Typer(help="Audit log commands") class Action(str, Enum): CREATE = "CREATE" diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index cb1e25c..ac13072 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -2,7 +2,7 @@ from rich import print_json -app = typer.Typer() +app = typer.Typer(help="Catalog commands") @app.command() def list( diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py index 80fdc46..4f36b35 100644 --- a/cortexapps_cli/commands/custom_data.py +++ b/cortexapps_cli/commands/custom_data.py @@ -4,7 +4,7 @@ from rich import print_json -app = typer.Typer() +app = typer.Typer(help="Custom data commands") @app.command() def add( diff --git a/cortexapps_cli/commands/teams.py b/cortexapps_cli/commands/teams.py index c852a9f..ce97783 100644 --- a/cortexapps_cli/commands/teams.py +++ b/cortexapps_cli/commands/teams.py @@ -7,7 +7,7 @@ from cortexapps_cli.models.team import Team -app = typer.Typer() +app = typer.Typer(help="Teams commands") class TeamType(str, Enum): CORTEX = "CORTEX" From 
7bc72b15a6582c16f19b4ef7e82f58c6150d2119 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Mon, 23 Sep 2024 10:27:17 -0700 Subject: [PATCH 07/56] Add custom-events; minor clean up --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/audit_logs.py | 100 +---------- cortexapps_cli/commands/custom_data.py | 18 +- cortexapps_cli/commands/custom_events.py | 214 +++++++++++++++++++++++ cortexapps_cli/cortex_client.py | 3 +- 5 files changed, 223 insertions(+), 114 deletions(-) create mode 100644 cortexapps_cli/commands/custom_events.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 04e9925..e06ae4a 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -12,6 +12,7 @@ import cortexapps_cli.commands.audit_logs as audit_logs import cortexapps_cli.commands.catalog as catalog import cortexapps_cli.commands.custom_data as custom_data +import cortexapps_cli.commands.custom_events as custom_events import cortexapps_cli.commands.teams as teams app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]}) @@ -20,6 +21,7 @@ app.add_typer(audit_logs.app, name="audit-logs") app.add_typer(catalog.app, name="catalog") app.add_typer(custom_data.app, name="custom-data") +app.add_typer(custom_events.app, name="custom-events") app.add_typer(teams.app, name="teams") # global options diff --git a/cortexapps_cli/commands/audit_logs.py b/cortexapps_cli/commands/audit_logs.py index 1a0d86a..d5b1ac5 100644 --- a/cortexapps_cli/commands/audit_logs.py +++ b/cortexapps_cli/commands/audit_logs.py @@ -24,102 +24,6 @@ class ActorRequestType(str, Enum): SCORECARD_BADGES = "SCORECARD_BADGES" SLACK_COMMAND = "SLACK_COMMAND" -# Defined in web/src/main/kotlin/com/brainera/web/audit/models/AuditedEntity.kt -# -# Not sure if we should enumerate this or have user explicitly list the items -# they want. User would have to inpsect existing elements to know what the -# possible values are. -# -# Maintenance will be a problem if we use enum. 
-class ObjectType(str, Enum): - ACCOUNT_FLAG = "ACCOUNT_FLAG" - ACTIVE_DIRECTORY_CONFIGURATION = "ACTIVE_DIRECTORY_CONFIGURATION" - ALLOW_LIST_ENTRY = "ALLOW_LIST_ENTRY" - API_KEY = "API_KEY" - ATLASSIAN_CONFIGURATION = "ATLASSIAN_CONFIGURATION" - AWS_CONFIGURATION = "AWS_CONFIGURATION" - AZURE_DEVOPS_CONFIGURATION = "AZURE_DEVOPS_CONFIGURATION" - AZURE_RESOURCES_CONFIGURATION = "AZURE_RESOURCES_CONFIGURATION" - BAMBOO_HR_CONFIGURATION = "BAMBOO_HR_CONFIGURATION" - BITBUCKET_CONFIGURATION = "BITBUCKET_CONFIGURATION" - BITBUCKET_OAUTH_CONFIGURATION = "BITBUCKET_OAUTH_CONFIGURATION" - BITBUCKET_OAUTH_REGISTRATION = "BITBUCKET_OAUTH_REGISTRATION" - BITBUCKET_ONPREM_CONFIGURATION = "BITBUCKET_ONPREM_CONFIGURATION" - BITBUCKET_ONPREM_WEBHOOK_SECRET = "BITBUCKET_ONPREM_WEBHOOK_SECRET" - BITBUCKET_PERSONAL_CONFIGURATION = "BITBUCKET_PERSONAL_CONFIGURATION" - BUGSNAG_CONFIGURATION = "BUGSNAG_CONFIGURATION" - BUILDKITE_CONFIGURATION = "BUILDKITE_CONFIGURATION" - CATALOG = "CATALOG" - CATALOG_FILTER = "CATALOG_FILTER" - CHECKMARX_SAST_CONFIGURATION = "CHECKMARX_SAST_CONFIGURATION" - CIRCLE_CI_CONFIGURATION = "CIRCLE_CI_CONFIGURATION" - CLICKUP_CONFIGURATION = "CLICKUP_CONFIGURATION" - CODECOV_CONFIGURATION = "CODECOV_CONFIGURATION" - CORALOGIX_CONFIGURATION = "CORALOGIX_CONFIGURATION" - CORTEX_TEAM_ROLES = "CORTEX_TEAM_ROLES" - CORTEX_USER = "CORTEX_USER" - CORTEX_USER_ROLES = "CORTEX_USER_ROLES" - CUSTOM_DATA = "CUSTOM_DATA" - CUSTOM_METRICS_CONFIGURATION = "CUSTOM_METRICS_CONFIGURATION" - CUSTOM_ROLE = "CUSTOM_ROLE" - DATADOG_CONFIGURATION = "DATADOG_CONFIGURATION" - DOMAIN = "DOMAIN" - DYNATRACE_CONFIGURATION = "DYNATRACE_CONFIGURATION" - ENTITY_TYPE_DEFINITION = "ENTITY_TYPE_DEFINITION" - ENTITY_VERIFICATION = "ENTITY_VERIFICATION" - FIREHYDRANT_CONFIGURATION = "FIREHYDRANT_CONFIGURATION" - GITHUB_APP_CONFIGURATION = "GITHUB_APP_CONFIGURATION" - GITHUB_APP_INSTALLATION = "GITHUB_APP_INSTALLATION" - GITHUB_PERSONAL_TOKEN = "GITHUB_PERSONAL_TOKEN" - GITHUB_WEBHOOK_SECRET = 
"GITHUB_WEBHOOK_SECRET" - GITLAB_CONFIGURATION = "GITLAB_CONFIGURATION" - GOOGLE_CONFIGURATION = "GOOGLE_CONFIGURATION" - INCIDENT_IO_CONFIGURATION = "INCIDENT_IO_CONFIGURATION" - INITIATIVE = "INITIATIVE" - INSTANA_CONFIGURATION = "INSTANA_CONFIGURATION" - JIRA_BASIC_CONFIGURATION = "JIRA_BASIC_CONFIGURATION" - JIRA_CONFIGURATION = "JIRA_CONFIGURATION" - JIRA_OAUTH_CONFIGURATION = "JIRA_OAUTH_CONFIGURATION" - JIRA_OAUTH_REGISTRATION = "JIRA_OAUTH_REGISTRATION" - JIRA_ONPREM_CONFIGURATION = "JIRA_ONPREM_CONFIGURATION" - LAUNCHDARKLY_CONFIGURATION = "LAUNCHDARKLY_CONFIGURATION" - LIGHTSTEP_CONFIGURATION = "LIGHTSTEP_CONFIGURATION" - MEND_SAST_CONFIGURATION = "MEND_SAST_CONFIGURATION" - MEND_SCA_CONFIGURATION = "MEND_SCA_CONFIGURATION" - MICROSOFT_TEAMS_CONFIGURATION = "MICROSOFT_TEAMS_CONFIGURATION" - NEWRELIC_CONFIGURATION = "NEWRELIC_CONFIGURATION" - OAUTH_CONFIGURATION = "OAUTH_CONFIGURATION" - OKTA_CONFIGURATION = "OKTA_CONFIGURATION" - OPENAPI_DEFINITION = "OPENAPI_DEFINITION" - OPSGENIE_CONFIGURATION = "OPSGENIE_CONFIGURATION" - PAGERDUTY_CONFIGURATION = "PAGERDUTY_CONFIGURATION" - PERSONAL_API_KEY = "PERSONAL_API_KEY" - PROMETHEUS_CONFIGURATION = "PROMETHEUS_CONFIGURATION" - RESOURCE = "RESOURCE" - ROLLBAR_CONFIGURATION = "ROLLBAR_CONFIGURATION" - SCORECARD = "SCORECARD" - SCORECARD_FILTER = "SCORECARD_FILTER" - SCORECARD_RULE = "SCORECARD_RULE" - SCORECARD_RULE_FILTER = "SCORECARD_RULE_FILTER" - SECRET = "SECRET" - SECRET_GROUP = "SECRET_GROUP" - SENTRY_CONFIGURATION = "SENTRY_CONFIGURATION" - SERVICE = "SERVICE" - SERVICENOW_CONFIGURATION = "SERVICENOW_CONFIGURATION" - SIGNALFX_CONFIGURATION = "SIGNALFX_CONFIGURATION" - SLACK_CONFIGURATION = "SLACK_CONFIGURATION" - SNYK_CONFIGURATION = "SNYK_CONFIGURATION" - SONARQUBE_CONFIGURATION = "SONARQUBE_CONFIGURATION" - SUMOLOGIC_CONFIGURATION = "SUMOLOGIC_CONFIGURATION" - TEAM = "TEAM" - VERACODE_CONFIGURATION = "VERACODE_CONFIGURATION" - VERIFICATION_PERIOD = "VERIFICATION_PERIOD" - VICTOROPS_CONFIGURATION = 
"VICTOROPS_CONFIGURATION" - WIZ_CONFIGURATION = "WIZ_CONFIGURATION" - WORKDAY_CONFIGURATION = "WORKDAY_CONFIGURATION" - WORKFLOW = "WORKFLOW" - XMATTERS_CONFIGURATION = "XMATTERS_CONFIGURATION" - @app.command() def get( ctx: typer.Context, @@ -131,7 +35,7 @@ def get( actorTypes: list[ActorType] | None = typer.Option(None, "--actorTypes", "-at", help="Actor that triggered the audit event"), end_time: datetime = typer.Option(None, "--endTime", "-e", help="End time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), objectIdentifiers: list[str] | None = typer.Option(None, "--objectIdentifiers", "-oi", help="The name of the Cortex object that was modified, ie x-cortex-tag value, metadata field name, etc."), - objectTypes: list[ObjectType] | None = typer.Option(None, "--objectTypes", "-ot", help="ObjectTypes"), + objectTypes: list[str] | None = typer.Option(None, "--objectTypes", "-ot", help="ObjectTypes"), page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), start_time: datetime = typer.Option(None, "--startTime", "-s", help="Start time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), @@ -169,10 +73,8 @@ def get( if page is None: # if page is not specified, we want to fetch all pages r = client.fetch("api/v1/audit-logs", params=params) - pass else: # if page is specified, we want to fetch only that page r = client.get("api/v1/audit-logs", params=params) - pass print_json(data=r) diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py index 4f36b35..be87224 100644 --- a/cortexapps_cli/commands/custom_data.py +++ b/cortexapps_cli/commands/custom_data.py @@ -22,11 +22,7 @@ def add( client = ctx.obj["client"] params = { - "description": description, "force": force, - "key": key, - "tag": tag, - "value": value } if 
file_input: @@ -84,8 +80,7 @@ def delete( params = { "force": force, - "key": key, - "tag": tag, + "key": key } r = client.delete("api/v1/catalog/" + tag + "/custom-data", params=params) @@ -101,12 +96,7 @@ def get( """ client = ctx.obj["client"] - params = { - "key": key, - "tag": tag - } - - r = client.get("api/v1/catalog/" + tag + "/custom-data/" + key, params=params) + r = client.get("api/v1/catalog/" + tag + "/custom-data/" + key) print_json(data=r) @@ -124,14 +114,14 @@ def list( params = { "page": page, - "pageSize": page_size, - "tag": tag + "pageSize": page_size } if page is None: # if page is not specified, we want to fetch all pages # Not working: https://cortex1.atlassian.net/browse/CET-13655 #r = client.fetch("api/v1/catalog/" + tag + "/custom-data", params=params) + r = client.get("api/v1/catalog/" + tag + "/custom-data", params=params) pass else: # if page is specified, we want to fetch only that page diff --git a/cortexapps_cli/commands/custom_events.py b/cortexapps_cli/commands/custom_events.py new file mode 100644 index 0000000..dd62df8 --- /dev/null +++ b/cortexapps_cli/commands/custom_events.py @@ -0,0 +1,214 @@ +from collections import defaultdict +from datetime import datetime +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Custom events commands") + +# Need a helper function to parse custom_data. 
+# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: +# AssertionError: List types with complex sub-types are not currently supported +# +# borrowed from https://github.com/fastapi/typer/issues/387 +def _parse_key_value(values): + if values is None: + return "" + result = {} + for value in values: + k, v = value.split('=') + result[k] = v + return result.items() + +@app.command() +def update_by_uuid( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None, + custom_data: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional custom metadata key=value pairs (only if file input not provided)."), + description: str = typer.Option(None, "--description", "-d", help="The description of the custom data key (only if file input not provided)."), + title: str = typer.Option(None, "--title", "-ti", help="The title of the custom event (only if file input not provided)."), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + uuid: str = typer.Option(..., "--uuid", "-u", help="UUID of custom event."), + event: str = typer.Option(None, "--type", "-y", help="The type of the custom event (only required if file input not provided)."), + url: str = typer.Option(None, "--url", help="The url of the custom event (optional, only required if file input not provided)."), + timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of custom event, defaults to current time (only if file input not provided)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), +): + """ + Update custom event for entity by UUID + """ + + client = ctx.obj["client"] + + if file_input: + if description or title or custom_data or event or url: + raise typer.BadParameter("When providing a custom event 
definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + if timestamp: + data["timestamp"] = timestamp + + else: + data = { + "title": title, + "timestamp": timestamp, + "type": event, + "url": url, + } + + if description: + data["description"] = description + if url: + data["url"] = url + if custom_data: + data["customData"] = dict(custom_data) + + # convert datetime type to string + for k, v in data.items(): + if str(type(v)) == "<class 'datetime.datetime'>": + data[k] = v.strftime('%Y-%m-%dT%H:%M:%S') + + r = client.put("api/v1/catalog/" + tag + "/custom-events/" + uuid, data=data) + print_json(json.dumps(r)) + +@app.command() +def create( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None, + custom_data: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional custom metadata key=value pairs (only if file input not provided."), + description: str = typer.Option(None, "--description", "-d", help="The description of the custom data key (only if file input not provided)."), + title: str = typer.Option(None, "--title", "-ti", help="The title of the custome event (only if file input not provided)."), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + event: str = typer.Option(None, "--type", "-y", help="The type of the custom event (only required if file input not provided)."), + url: str = typer.Option(None, "--url", "-u", help="The url of the custom event (optional, only required if file input not provided)."), + timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of custom event, defaults to current time (only if file input not provided)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), +): + """ + Create custom event for 
entity + """ + + client = ctx.obj["client"] + + if file_input: + if description or title or custom_data or event or url: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + if timestamp: + data["timestamp"] = timestamp + + else: + if not title: + raise typer.BadParameter("title is required if custom event file is not provided") + if not event: + raise typer.BadParameter("type is required if custom event file is not provided") + + data = { + "title": title, + "timestamp": timestamp, + "type": event, + "url": url, + } + + if description: + data["description"] = description + if url: + data["url"] = url + if custom_data: + data["customData"] = dict(custom_data) + + # convert datetime type to string + for k, v in data.items(): + if str(type(v)) == "<class 'datetime.datetime'>": + data[k] = v.strftime('%Y-%m-%dT%H:%M:%S') + + r = client.post("api/v1/catalog/" + tag + "/custom-events", data=data) + print_json(json.dumps(r)) + +@app.command() +def delete_all( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + event: str = typer.Option(None, "--type", "-y", help="The type of the custom event, defaults to all."), + timestamp: datetime = typer.Option(None, "--timestamp", "-ts", help="Optional timestamp of custom events to delete.", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), +): + """ + Delete all custom events for an entity + """ + + client = ctx.obj["client"] + + params = { + "type": event, + "timestamp": timestamp + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + # convert datetime type to string + for k, v in params.items(): + if str(type(v)) == "<class 'datetime.datetime'>": + params[k] = v.strftime('%Y-%m-%dT%H:%M:%S') + + r = client.delete("api/v1/catalog/" + tag + "/custom-events", params=params) + +@app.command() +def 
list( + ctx: typer.Context, + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + event: str = typer.Option(None, "--type", "-y", help="The type of the custom event, defaults to all."), + timestamp: datetime = typer.Option(None, "--timestamp", "-ts", help="Optional timestamp of custom events to delete.", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), +): + """ + List custom events for entity + """ + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size, + "timestamp": timestamp, + "type": event + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/" + tag + "/custom-events", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/" + tag + "/custom-events", params=params) + + print_json(data=r) + +@app.command() +def get_by_uuid( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + uuid: str = typer.Option(..., "--uuid", "-u", help="UUID of custom event."), +): + """ + Get custom event by UUID + """ + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag + "/custom-events/" + uuid) + + print_json(data=r) + +@app.command() +def delete_by_uuid( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + uuid: str = typer.Option(..., "--uuid", "-u", help="UUID of custom event."), +): + """ + Delete custom events by 
UUID + """ + client = ctx.obj["client"] + + r = client.delete("api/v1/catalog/" + tag + "/custom-events/" + uuid) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index da96191..1633def 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -10,7 +10,6 @@ def __init__(self, api_key, base_url='https://api.getcortexapp.com'): def data_key_for_endpoint(self, endpoint): end_endpoint = endpoint.split('/')[-1] - print("ep = " + end_endpoint) match end_endpoint: case 'catalog': return 'entities' @@ -20,6 +19,8 @@ def data_key_for_endpoint(self, endpoint): return 'deployments' case 'custom-data': return '' + case 'custom-events': + return 'events' case _: return end_endpoint From 703110c388d96e6989ee159772c75628a127530c Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Thu, 26 Sep 2024 15:02:56 -0400 Subject: [PATCH 08/56] add raw request commands --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/raw.py | 129 ++++++++++++++++++++++++++++++++ cortexapps_cli/cortex_client.py | 34 +++++---- 3 files changed, 151 insertions(+), 14 deletions(-) create mode 100644 cortexapps_cli/commands/raw.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index e06ae4a..852b761 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -13,6 +13,7 @@ import cortexapps_cli.commands.catalog as catalog import cortexapps_cli.commands.custom_data as custom_data import cortexapps_cli.commands.custom_events as custom_events +import cortexapps_cli.commands.raw as raw import cortexapps_cli.commands.teams as teams app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]}) @@ -22,6 +23,7 @@ app.add_typer(catalog.app, name="catalog") app.add_typer(custom_data.app, name="custom-data") app.add_typer(custom_events.app, name="custom-events") +app.add_typer(raw.app, name="raw") app.add_typer(teams.app, name="teams") # global options diff --git a/cortexapps_cli/commands/raw.py b/cortexapps_cli/commands/raw.py 
new file mode 100644 index 0000000..75f32f8 --- /dev/null +++ b/cortexapps_cli/commands/raw.py @@ -0,0 +1,129 @@ +import json +import yaml +from typing import List, Optional + +import typer +from typing_extensions import Annotated + +from rich import print_json +app = typer.Typer() + +def parse_multi_value_option(option: List[str] | None) -> dict: + if option is None: + return {} + return dict([param.split('=') for param in option]) + +def guess_content_type(data: str) -> str: + try: + json.loads(data) + return 'application/json' + except json.JSONDecodeError: + try: + yaml_data = yaml.safe_load(data) + if isinstance(yaml_data, dict): + if 'openapi' in yaml_data: + return 'application/openapi;charset=utf-8' + return 'application/yaml' + return 'text/plain' + except yaml.YAMLError: + return 'text/plain' + +class RawCommandOptions: + endpoint = typer.Option(..., "--endpoint", "-e", help="API endpoint", show_default=False) + headers = Annotated[ + Optional[List[str]], + typer.Option("--headers", "-H", help="Headers to include in the request, in the format HeaderName=value", show_default=False) + ] + params = Annotated[Optional[List[str]], typer.Option("--params", "-P", help="Parameters to include in the request, in the format ParamName=value", show_default=False)] + input_file = Annotated[typer.FileText, typer.Option("--file", "-f", help="File to read the request body from, use - for stdin")] + content_type = typer.Option(None, "--content-type", "-c", help="Content type of the request body (leave blank to guess)") + + +@app.command() +def get( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], +): + """ + Make a GET request to the API + """ + req_headers = parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + r = client.get(endpoint, headers=req_headers, params=req_params) + print_json(data=r) + +@app.command() 
+def fetch( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], +): + """ + Make a GET request to the API, and automatically fetch all pages + """ + req_headers = parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + r = client.fetch(endpoint, headers=req_headers, params=req_params) + print_json(json.dumps(r)) + +@app.command() +def delete( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], +): + """ + Make a DELETE request to the API + """ + req_headers = parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + r = client.delete(endpoint, headers=req_headers, params=req_params) + if (r): + print_json(json.dumps(r)) + +@app.command() +def post( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], + content_type: str = RawCommandOptions.content_type, + input: RawCommandOptions.input_file = '-' +): + """ + Make a POST request to the API + """ + req_headers = parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + data = "".join([line for line in input]) + content_type = content_type or guess_content_type(data) + r = client.post(endpoint, headers=req_headers, params=req_params, data=data, raw_body=True, content_type=content_type) + print_json(json.dumps(r)) + +@app.command() +def put( + ctx: typer.Context, + endpoint: str = RawCommandOptions.endpoint, + headers: RawCommandOptions.headers = [], + params: RawCommandOptions.params = [], + content_type: str = RawCommandOptions.content_type, + input: RawCommandOptions.input_file = '-' +): + """ + Make a PUT request to the API + """ + req_headers = 
parse_multi_value_option(headers) + req_params = parse_multi_value_option(params) + client = ctx.obj["client"] + data = "".join([line for line in input]) + content_type = content_type or guess_content_type(data) + r = client.put(endpoint, headers=req_headers, params=req_params, data=data, raw_body=True, content_type=content_type) + print_json(json.dumps(r)) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index 1633def..a14ec3d 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -24,14 +24,20 @@ def data_key_for_endpoint(self, endpoint): case _: return end_endpoint - def request(self, method, endpoint, params={}, headers={}, data=None, raw=False): + def request(self, method, endpoint, params={}, headers={}, data=None, raw_body=False, raw_response=False, content_type='application/json'): req_headers = { 'Authorization': f'Bearer {self.api_key}', - 'Content-Type': 'application/json', + 'Content-Type': content_type, **headers } url = '/'.join([self.base_url.rstrip('/'), endpoint.lstrip('/')]) - response = requests.request(method, url, params=params, headers=req_headers, json=data) + + req_data = data + if not raw_body: + if content_type == 'application/json' and isinstance(data, dict): + req_data = json.dumps(data) + + response = requests.request(method, url, params=params, headers=req_headers, data=req_data) if not response.ok: try: @@ -47,7 +53,7 @@ def request(self, method, endpoint, params={}, headers={}, data=None, raw=False) except json.JSONDecodeError: # if we can't parse the error message, just raise the HTTP error response.raise_for_status() - if raw: + if raw_response: return response try: @@ -60,18 +66,18 @@ def request(self, method, endpoint, params={}, headers={}, data=None, raw=False) else: return None - def get(self, endpoint, params={}, headers={}, raw=False): - return self.request('GET', endpoint, params=params, headers=headers, raw=raw) + def get(self, endpoint, params={}, headers={}, 
raw_response=False): + return self.request('GET', endpoint, params=params, headers=headers, raw_response=raw_response) - def post(self, endpoint, data={}, params={}, headers={}, raw=False): - return self.request('POST', endpoint, data=data, params=params, headers=headers, raw=raw) + def post(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_response=False, content_type='application/json'): + return self.request('POST', endpoint, data=data, params=params, headers=headers, raw_body=raw_body, raw_response=raw_response, content_type=content_type) - def put(self, endpoint, data={}, params={}, headers={}, raw=False): - return self.request('PUT', endpoint, data=data, params=params, headers=headers, raw=raw) + def put(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_response=False, content_type='application/json'): + return self.request('PUT', endpoint, data=data, params=params, headers=headers, raw_bodyy=raw_body, raw_response=raw_response, content_type=content_type) + + def delete(self, endpoint, params={}, headers={}, raw_response=False): + return self.request('DELETE', endpoint, params=params, headers=headers, raw_response=raw_response) - def delete(self, endpoint, params={}, headers={}, raw=False): - return self.request('DELETE', endpoint, params=params, headers=headers, raw=raw) - def fetch(self, endpoint, params={}, headers={}): # do paginated fetch, page number is indexed at 0 # param page is page number, param pageSize is page size, default 250 @@ -84,7 +90,7 @@ def fetch(self, endpoint, params={}, headers={}): # Some endpoints just return an array as the root element. 
if data_key == '': data.extend(response) - else: + else: if data_key not in response or not response[data_key]: break data.extend(response[data_key]) From 4a4c9da0bca375c2c11a5c673ab36ae75cad7146 Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Thu, 26 Sep 2024 15:33:32 -0400 Subject: [PATCH 09/56] add a blank line to raw post/put when reading from interactive terminal stdin --- cortexapps_cli/commands/raw.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/cortexapps_cli/commands/raw.py b/cortexapps_cli/commands/raw.py index 75f32f8..c804ab7 100644 --- a/cortexapps_cli/commands/raw.py +++ b/cortexapps_cli/commands/raw.py @@ -1,5 +1,6 @@ import json import yaml +import sys from typing import List, Optional import typer @@ -106,6 +107,8 @@ def post( data = "".join([line for line in input]) content_type = content_type or guess_content_type(data) r = client.post(endpoint, headers=req_headers, params=req_params, data=data, raw_body=True, content_type=content_type) + if input == sys.stdin and sys.stdin.isatty() and sys.stdout.isatty(): + print("") print_json(json.dumps(r)) @app.command() @@ -126,4 +129,6 @@ def put( data = "".join([line for line in input]) content_type = content_type or guess_content_type(data) r = client.put(endpoint, headers=req_headers, params=req_params, data=data, raw_body=True, content_type=content_type) + if input == sys.stdin and sys.stdin.isatty() and sys.stdout.isatty(): + print("") print_json(json.dumps(r)) From 2471abe43d5d432b9e7f6da259f29406086710e3 Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Thu, 26 Sep 2024 15:44:13 -0400 Subject: [PATCH 10/56] give a nice error when invalid param --- cortexapps_cli/commands/raw.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/cortexapps_cli/commands/raw.py b/cortexapps_cli/commands/raw.py index c804ab7..a39c9c0 100644 --- a/cortexapps_cli/commands/raw.py +++ b/cortexapps_cli/commands/raw.py @@ -12,7 +12,10 @@ def parse_multi_value_option(option: List[str] | None) -> 
dict: if option is None: return {} - return dict([param.split('=') for param in option]) + try: + return dict([param.split('=') for param in option]) + except: + raise typer.BadParameter("Invalid parameter format, use Name=value") def guess_content_type(data: str) -> str: try: From 7e474ea4a2e2a5d1fcfb68405bca66530b4d9b67 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Mon, 30 Sep 2024 11:12:43 -0700 Subject: [PATCH 11/56] Add dependencies commands --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/custom_data.py | 14 ++ cortexapps_cli/commands/dependencies.py | 243 ++++++++++++++++++++++++ cortexapps_cli/cortex_client.py | 8 +- 4 files changed, 264 insertions(+), 3 deletions(-) create mode 100644 cortexapps_cli/commands/dependencies.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 852b761..8a87fff 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -13,6 +13,7 @@ import cortexapps_cli.commands.catalog as catalog import cortexapps_cli.commands.custom_data as custom_data import cortexapps_cli.commands.custom_events as custom_events +import cortexapps_cli.commands.dependencies as dependencies import cortexapps_cli.commands.raw as raw import cortexapps_cli.commands.teams as teams @@ -23,6 +24,7 @@ app.add_typer(catalog.app, name="catalog") app.add_typer(custom_data.app, name="custom-data") app.add_typer(custom_events.app, name="custom-events") +app.add_typer(dependencies.app, name="dependencies") app.add_typer(raw.app, name="raw") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py index be87224..3ae6ea9 100644 --- a/cortexapps_cli/commands/custom_data.py +++ b/cortexapps_cli/commands/custom_data.py @@ -5,6 +5,20 @@ from rich import print_json app = typer.Typer(help="Custom data commands") +# Need a helper function to parse custom_data. 
+# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: +# AssertionError: List types with complex sub-types are not currently supported +# +# borrowed from https://github.com/fastapi/typer/issues/387 +def _parse_key_value(values): + if values is None: + return "" + result = {} + for value in values: + k, v = value.split('=') + result[k] = v + return result.items() @app.command() def add( diff --git a/cortexapps_cli/commands/dependencies.py b/cortexapps_cli/commands/dependencies.py new file mode 100644 index 0000000..58a4151 --- /dev/null +++ b/cortexapps_cli/commands/dependencies.py @@ -0,0 +1,243 @@ +from collections import defaultdict +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Dependency commands") + +# Need a helper function to parse custom_data. +# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: +# AssertionError: List types with complex sub-types are not currently supported +# +# borrowed from https://github.com/fastapi/typer/issues/387 +def _parse_key_value(values): + if values is None: + return "" + result = {} + for value in values: + k, v = value.split('=') + result[k] = v + return result.items() + +@app.command() +def create( + ctx: typer.Context, + callee_tag: str = typer.Option(..., "--callee-tag", "-e", help="The entity tag (x-cortex-tag) for the caller entity (\"to\" entity)"), + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + description: str = typer.Option("", "--description", "-d", help="The description of the dependency"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing dependency metadata; can be passed as stdin with -, example: -f-")] = None, + metadata: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of 
optional metadata key=value pairs (only; will be merged with file input"), + method: str = typer.Option(None, "--method", "-m", help="The HTTP method type of the dependency"), + path: str = typer.Option(None, "--path", "-p", help="The path of the dependency") +): + """ + Create dependency from entity + """ + + client = ctx.obj["client"] + + params = { + } + + if method: + params["method"] = method + if path: + params["path"] = path + + data = { + "description": "", + "metadata": { + } + } + + dependency_metadata = { + } + + if file_input: + data = json.loads("".join([line for line in file_input])) + + # if metadata provided in file and command line, command line takes precedence + if metadata: + data["metadata"] = data["metadata"] | dict(metadata) + + if description: + data["description"] = description + + r = client.post("api/v1/catalog/" + caller_tag + "/dependencies/" + callee_tag, data=data, params=params) + print_json(json.dumps(r)) + +@app.command() +def delete_in_bulk( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help=" File containing dependency values to delete; can be passed as stdin with -, example: -f-")] = None, +): + """ + Delete dependencies in bulk, see https://docs.cortex.io/docs/api/delete-dependencies-in-bulk for format of input file + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.delete("api/v1/catalog/dependencies", data=data) + +@app.command() +def add_in_bulk( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help=" File containing dependency values to create or update; can be passed as stdin with -, example: -f-")] = None, +): + """ + Create or update dependencies in bulk, see https://docs.cortex.io/docs/api/create-or-update-dependencies-in-bulk + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = 
client.put("api/v1/catalog/dependencies", data=data) + +@app.command() +def delete_all( + ctx: typer.Context, + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), +): + """ + Deletes any outgoing dependencies that were created via the API from the entity + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/catalog/" + caller_tag + "/dependencies") + +@app.command() +def get_all( + ctx: typer.Context, + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + include_incoming: bool = typer.Option(True, "--include-incoming", "-i", help="Include incoming dependencies"), + include_outgoing: bool = typer.Option(False, "--include-outgoing", "-o", help="Include outgoing dependencies"), + page: int = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + Retrieve all dependencies for an entity + """ + + params = { + "includeIncoming": include_incoming, + "includeOutgoing": include_outgoing, + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client = ctx.obj["client"] + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/" + caller_tag + "/dependencies", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/" + caller_tag + "/dependencies", params=params) + + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + callee_tag: str = typer.Option(..., "--callee-tag", "-e", help="The entity tag (x-cortex-tag) for the caller entity (\"to\" entity)"), + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The 
entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + method: str = typer.Option(None, "--method", "-m", help="The HTTP method type of the dependency"), + path: str = typer.Option(None, "--path", "-p", help="The path of the dependency") +): + """ + Delete a dependency from an entity + """ + + params = { + "method": method, + "path": path + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client = ctx.obj["client"] + + r = client.delete("api/v1/catalog/" + caller_tag + "/dependencies/" + callee_tag, params=params) + +@app.command() +def get( + ctx: typer.Context, + callee_tag: str = typer.Option(..., "--callee-tag", "-e", help="The entity tag (x-cortex-tag) for the caller entity (\"to\" entity)"), + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + method: str = typer.Option(None, "--method", "-m", help="The HTTP method type of the dependency"), + path: str = typer.Option(None, "--path", "-p", help="The path of the dependency") +): + """ + Retrieve dependency betweek entities + """ + + params = { + "method": method, + "path": path + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + caller_tag + "/dependencies/" + callee_tag, params=params) + + print_json(data=r) + +@app.command() +def update( + ctx: typer.Context, + callee_tag: str = typer.Option(..., "--callee-tag", "-e", help="The entity tag (x-cortex-tag) for the caller entity (\"to\" entity)"), + caller_tag: str = typer.Option(..., "--caller-tag", "-r", help="The entity tag (x-cortex-tag) for the caller entity (\"from\" entity)"), + description: str = typer.Option("", "--description", "-d", help="The description of the dependency"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing 
dependency metadata; can be passed as stdin with -, example: -f-")] = None, + metadata: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional metadata key=value pairs; will be merged with file input"), + method: str = typer.Option(None, "--method", "-m", help="The HTTP method type of the dependency"), + path: str = typer.Option(None, "--path", "-p", help="The path of the dependency") +): + """ + Update dependency between entities + """ + + client = ctx.obj["client"] + + params = { + } + + if method: + params["method"] = method + if path: + params["path"] = path + + data = { + "description": "", + "metadata": { + } + } + + dependency_metadata = { + } + + if file_input: + data = json.loads("".join([line for line in file_input])) + + # if metadata provided in file and command line, command line takes precedence + if metadata: + data["metadata"] = data["metadata"] | dict(metadata) + + if description: + data["description"] = description + + r = client.put("api/v1/catalog/" + caller_tag + "/dependencies/" + callee_tag, data=data, params=params) + print_json(json.dumps(r)) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index a14ec3d..836328c 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -21,6 +21,8 @@ def data_key_for_endpoint(self, endpoint): return '' case 'custom-events': return 'events' + case 'dependencies': + return 'dependencies' case _: return end_endpoint @@ -73,10 +75,10 @@ def post(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_res return self.request('POST', endpoint, data=data, params=params, headers=headers, raw_body=raw_body, raw_response=raw_response, content_type=content_type) def put(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_response=False, content_type='application/json'): - return self.request('PUT', endpoint, data=data, params=params, headers=headers, raw_bodyy=raw_body, 
raw_response=raw_response, content_type=content_type) + return self.request('PUT', endpoint, data=data, params=params, headers=headers, raw_body=raw_body, raw_response=raw_response, content_type=content_type) - def delete(self, endpoint, params={}, headers={}, raw_response=False): - return self.request('DELETE', endpoint, params=params, headers=headers, raw_response=raw_response) + def delete(self, endpoint, data={}, params={}, headers={}, raw_response=False): + return self.request('DELETE', endpoint, data=data, params=params, headers=headers, raw_response=raw_response) def fetch(self, endpoint, params={}, headers={}): # do paginated fetch, page number is indexed at 0 From 2d2886d3b4474e1b2b6e3a4adda7e196e2834567 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Tue, 1 Oct 2024 16:35:12 -0700 Subject: [PATCH 12/56] Add custom-metrics --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/custom_metrics.py | 141 ++++++++++++++++++++++ cortexapps_cli/commands/dependencies.py | 6 - cortexapps_cli/cortex_client.py | 18 ++- 4 files changed, 160 insertions(+), 7 deletions(-) create mode 100644 cortexapps_cli/commands/custom_metrics.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 8a87fff..feee99c 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -13,6 +13,7 @@ import cortexapps_cli.commands.catalog as catalog import cortexapps_cli.commands.custom_data as custom_data import cortexapps_cli.commands.custom_events as custom_events +import cortexapps_cli.commands.custom_metrics as custom_metrics import cortexapps_cli.commands.dependencies as dependencies import cortexapps_cli.commands.raw as raw import cortexapps_cli.commands.teams as teams @@ -24,6 +25,7 @@ app.add_typer(catalog.app, name="catalog") app.add_typer(custom_data.app, name="custom-data") app.add_typer(custom_events.app, name="custom-events") +app.add_typer(custom_metrics.app, name="custom-metrics") app.add_typer(dependencies.app, name="dependencies") app.add_typer(raw.app, 
name="raw") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/custom_metrics.py b/cortexapps_cli/commands/custom_metrics.py new file mode 100644 index 0000000..db92759 --- /dev/null +++ b/cortexapps_cli/commands/custom_metrics.py @@ -0,0 +1,141 @@ +from collections import defaultdict +from datetime import datetime +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Custom metrics commands") + +# Need a helper function to parse custom_data. +# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: +# AssertionError: List types with complex sub-types are not currently supported +# +# borrowed from https://github.com/fastapi/typer/issues/387 +def _parse_key_value(values): + if values is None: + return [] + result = [] + for value in values: + ts, v = value.split('=') + result.append({"timestamp": ts, "value": v}) + return result + +def _convert_datetime_to_string(params): + for k, v in params.items(): + if str(type(v)) == "<class 'datetime.datetime'>": + params[k] = v.strftime('%Y-%m-%dT%H:%M:%S') + return params + +@app.command() +def get( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + start_date: datetime = typer.Option(None, "--start-date", "-s", help="Start date for the filter (inclusive). 
Default: 6 months", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + end_date: datetime = typer.Option(None, "--end-date", "-e", help="End date for the filter (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + List custom metrics data points for an entity + """ + + client = ctx.obj["client"] + + params = { + "startDate": start_date, + "endDate": end_date, + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + params = _convert_datetime_to_string(params) + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, params=params) + + print_json(json.dumps(r)) + +@app.command() +def add( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-s", help="Timestamp for the data point; cannot be earlier than 6 months", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + value: float = typer.Option(..., "--value", "-v", help="Value for the data point"), +): + """ + Add a single custom metric data point for entity + """ + + client = ctx.obj["client"] + + data = { + "timestamp": timestamp, + "value": value + } + + data =
_convert_datetime_to_string(data) + + r = client.post("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, data=data) + +@app.command() +def add_in_bulk( + ctx: typer.Context, + custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None, + series: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of timestamp=value pairs."), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Add multiple custom metric data points for entity, can be provided in file, command line or combination of both + """ + + client = ctx.obj["client"] + + data = { + "series": [] + } + series_data = { + "series": series + } + + if file_input: + data = json.loads("".join([line for line in file_input])) + + if series: + for item in series: + data["series"].append(item) + + r = client.post("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag + "/bulk", data=data) + +@app.command() +def delete( + ctx: typer.Context, + custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + start_date: datetime = typer.Option(None, "--start-date", "-s", help="Start date for the deletion (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + end_date: datetime = typer.Option(None, "--end-date", "-e", help="End date for the deletion (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), +): + """ + Delete custom metric data points for
entity + """ + + client = ctx.obj["client"] + + params = { + "startDate": start_date, + "endDate": end_date + } + params = _convert_datetime_to_string(params) + + r = client.delete("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, params=params) diff --git a/cortexapps_cli/commands/dependencies.py b/cortexapps_cli/commands/dependencies.py index 58a4151..f3b357e 100644 --- a/cortexapps_cli/commands/dependencies.py +++ b/cortexapps_cli/commands/dependencies.py @@ -52,9 +52,6 @@ def create( } } - dependency_metadata = { - } - if file_input: data = json.loads("".join([line for line in file_input])) @@ -226,9 +223,6 @@ def update( } } - dependency_metadata = { - } - if file_input: data = json.loads("".join([line for line in file_input])) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index 836328c..c349796 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -8,19 +8,35 @@ def __init__(self, api_key, base_url='https://api.getcortexapp.com'): self.api_key = api_key self.base_url = base_url + # Might need to rethink logic. Not all gets have an indicator as the + # last path element, ie custom-metrics. 
def data_key_for_endpoint(self, endpoint): - end_endpoint = endpoint.split('/')[-1] + api_paths = endpoint.split('/') + if "eng-intel" in api_paths: + return 'data' + + #end_endpoint = endpoint.split('/')[-1] + end_endpoint = api_paths[-1] match end_endpoint: + # https://api.getcortexapp.com/api/v1/catalog case 'catalog': return 'entities' + # https://api.getcortexapp.com/api/v1/audit-logs case 'audit-logs': return 'logs' + # https://api.getcortexapp.com/api/v1/catalog/:tagOrId/deploys case 'deploys': return 'deployments' + # https://api.getcortexapp.com/api/v1/catalog/:tagOrId/custom-data case 'custom-data': return '' + # https://api.getcortexapp.com/api/v1/catalog/:tagOrId/custom-events case 'custom-events': return 'events' + # https://api.getcortexapp.com/api/v1/eng-intel/custom-metrics/:customMetricKey/entity/:tagOrId + case 'custom-metrics': + return 'data' + # https://api.getcortexapp.com/api/v1/catalog/:callerTag/dependencies case 'dependencies': return 'dependencies' case _: From ca41f9cabb66ad8ab5c31aac0b7f535ecbaf3565 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Mon, 7 Oct 2024 16:58:13 -0700 Subject: [PATCH 13/56] Add deploys command --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/dependencies.py | 32 ++-- cortexapps_cli/commands/deploys.py | 233 ++++++++++++++++++++++++ 3 files changed, 252 insertions(+), 15 deletions(-) create mode 100644 cortexapps_cli/commands/deploys.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index feee99c..9fa6ce3 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -15,6 +15,7 @@ import cortexapps_cli.commands.custom_events as custom_events import cortexapps_cli.commands.custom_metrics as custom_metrics import cortexapps_cli.commands.dependencies as dependencies +import cortexapps_cli.commands.deploys as deploys import cortexapps_cli.commands.raw as raw import cortexapps_cli.commands.teams as teams @@ -27,6 +28,7 @@ app.add_typer(custom_events.app, name="custom-events") 
app.add_typer(custom_metrics.app, name="custom-metrics") app.add_typer(dependencies.app, name="dependencies") +app.add_typer(deploys.app, name="deploys") app.add_typer(raw.app, name="raw") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/dependencies.py b/cortexapps_cli/commands/dependencies.py index f3b357e..382939b 100644 --- a/cortexapps_cli/commands/dependencies.py +++ b/cortexapps_cli/commands/dependencies.py @@ -38,26 +38,28 @@ def create( client = ctx.obj["client"] - params = { - } - - if method: - params["method"] = method - if path: - params["path"] = path - - data = { - "description": "", - "metadata": { - } - } if file_input: + if description or metadata or method or path or caller_tag or callee_tag: + raise typer.BadParameter("When providing a dependencies input file, do not specify any other dependency event attributes") data = json.loads("".join([line for line in file_input])) + else: + params = { + } + + if method: + params["method"] = method + if path: + params["path"] = path + + data = { + "description": "", + "metadata": { + } + } - # if metadata provided in file and command line, command line takes precedence if metadata: - data["metadata"] = data["metadata"] | dict(metadata) + data["metadata"] = dict(metadata) if description: data["description"] = description diff --git a/cortexapps_cli/commands/deploys.py b/cortexapps_cli/commands/deploys.py new file mode 100644 index 0000000..253c6ab --- /dev/null +++ b/cortexapps_cli/commands/deploys.py @@ -0,0 +1,233 @@ +from collections import defaultdict +from datetime import datetime +from enum import Enum +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Deploys commands") + +# Need a helper function to parse custom_data. 
+# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: +# AssertionError: List types with complex sub-types are not currently supported +# +# borrowed from https://github.com/fastapi/typer/issues/387 +def _parse_key_value(values): + if values is None: + return "" + result = {} + for value in values: + k, v = value.split('=') + result[k] = v + return result.items() + +class Type(str, Enum): + DEPLOY = "DEPLOY" + SCALE = "SCALE" + ROLLBACK = "ROLLBACK" + RESTART = "RESTART" + +@app.command() +def delete_by_filter( + ctx: typer.Context, + sha: str = typer.Option(None, "--sha", "-s", help="The Secure Hash Algorithm (SHA) of the deploy"), + environment: str = typer.Option(None, "--environment", "-e", help="The name of the environment"), + type: Type = typer.Option(None, "--type", "-ty", help="The type of the deploy"), +): + """ + Filter and delete deploys by SHA hash, environment or type + """ + + client = ctx.obj["client"] + + if not sha and not environment and not type: + raise typer.BadParameter("At least one of sha, environment or type must be provided.") + + params = { + "environment": environment, + "sha": sha, + "type": type + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client.delete("api/v1/catalog/deploys", params=params) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Dangerous endpoint that blows away deploys for all entities + """ + + client = ctx.obj["client"] + + client.delete("api/v1/catalog/deploys/all") + +@app.command() +def delete( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), +): + """ + Delete deployments for entity + """ + + client = ctx.obj["client"] + + client.delete("api/v1/catalog/" + tag + "/deploys") + +# 'list' is a keyword in python; naming the function 'list' will cause problems like this: +# TypeError: 'function' 
object is not subscriptable +# +# Because of this subsequent line in the file: +# customData: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional custom metadata key=value pairs"), +# +# There is a collision between naming this function 'list' and then expecting to use list as the python built-in. +@app.command("list") +def deploys_list( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + List deployments for entity + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/" + tag + "/deploys", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/" + tag + "/deploys", params=params) + + print_json(json.dumps(r)) + +@app.command() +def add( + ctx: typer.Context, + customData: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of optional custom metadata key=value pairs"), + email: str = typer.Option(None, "--email", "-m", help="Email address of deployer"), + environment: str = typer.Option(None, "--environment", "-e", help="The name of the environment"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing dependency metadata; can be passed as stdin with -, example: -f-")] = None, + name: str = typer.Option(None, "--name", "-n", help="Name of deployer"), + sha: str = typer.Option(None, "--sha", "-s", 
help="The Secure Hash Algorithm (SHA) of the deploy"), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of the deploy", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + title: str = typer.Option(..., "--title", "-ti", help="The title of the deploy"), + type: Type = typer.Option(..., "--type", "-ty", help="The type of the deploy"), + url: str = typer.Option(None, "--url", "-u", help="The Uniform Resource Locator(URL) of the deploy") +): + """ + Add deployment for entity + """ + + client = ctx.obj["client"] + + if file_input: + if customData or email or environment or name or sha or timestamp or title or type or url: + raise typer.BadParameter("When providing a deploy input file, do not specify any other deploy event attributes") + data = json.loads("".join([line for line in file_input])) + else: + + data = { + "timestamp": timestamp, + "title": title, + "type": type.value, + "sha": sha, + "environment": environment, + "deployer": { + "email": email, + "name": name + }, + "customData": { + } + } + + if customData: + data["customData"] = dict(customData) + data["timestamp"] = data["timestamp"].strftime('%Y-%m-%dT%H:%M:%SZ') + + r = client.post("api/v1/catalog/" + tag + "/deploys", data=data) + print_json(json.dumps(r)) + +@app.command() +def delete_by_uuid( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + uuid: str = typer.Option(..., "--uuid", "-uu", help="The Universally Unique Identifier (UUID) of the deploy") +): + """ + Delete deployment by uuid + """ + + client = ctx.obj["client"] + + client.delete("api/v1/catalog/" + tag + "/deploys/" + uuid) + +@app.command() +def update_by_uuid( + ctx: typer.Context, + customData: list[str] | None = typer.Option(None, "--custom", "-c", 
callback=_parse_key_value, help="List of optional custom metadata key=value pairs"), + email: str = typer.Option(None, "--email", "-m", help="Email address of deployer"), + environment: str = typer.Option(None, "--environment", "-e", help="The name of the environment"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing dependency metadata; can be passed as stdin with -, example: -f-")] = None, + name: str = typer.Option(None, "--name", "-n", help="Name of deployer"), + sha: str = typer.Option(None, "--sha", "-s", help="The Secure Hash Algorithm (SHA) of the deploy"), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of the deploy", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + title: str = typer.Option(..., "--title", "-ti", help="The title of the deploy"), + deploy_type: Type = typer.Option(..., "--type", "-ty", help="The type of the deploy"), + url: str = typer.Option(None, "--url", "-u", help="The Uniform Resource Locator(URL) of the deploy"), + uuid: str = typer.Option(..., "--uuid", "-uu", help="The Universally Unique Identifier (UUID) of the deploy") +): + """ + Update deployment for entity + """ + + client = ctx.obj["client"] + + if file_input: + if customData or email or environment or name or sha or timestamp or title or deploy_type or url: + raise typer.BadParameter("When providing a deploy input file, do not specify any other deploy event attributes") + data = json.loads("".join([line for line in file_input])) + else: + + data = { + "environment": environment, + "sha": sha, + "type": deploy_type.value, + "timestamp": timestamp, + "title": title + } + + # remove any data valus that are None + data = {k: v for k, v in data.items() if v is not None} + + if customData: + data["customData"] = dict(customData) + if email or name: + 
data["deployer"] = {} + if email: + data["deployer"]["email"] = email + if name: + data["deployer"]["name"] = name + data["timestamp"] = data["timestamp"].strftime('%Y-%m-%dT%H:%M:%SZ') + + r = client.put("api/v1/catalog/" + tag + "/deploys/" + uuid, data=data) + print_json(json.dumps(r)) From 2908f354589eb3ee1aebcc61ad19502a45f7af18 Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Tue, 8 Oct 2024 11:16:11 -0400 Subject: [PATCH 14/56] new fetch in client, CSV and table output in utils.py --- cortexapps_cli/commands/catalog.py | 132 +++++++++++++++++++++++----- cortexapps_cli/cortex_client.py | 82 ++++++++++-------- cortexapps_cli/utils.py | 135 +++++++++++++++++++++++++++++ 3 files changed, 292 insertions(+), 57 deletions(-) create mode 100644 cortexapps_cli/utils.py diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index ac13072..430bb94 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -1,28 +1,121 @@ import typer +from typing import Optional, List +from typing_extensions import Annotated -from rich import print_json +from cortexapps_cli.utils import print_output app = typer.Typer(help="Catalog commands") -@app.command() -def list( +class ListCommandOptions: + table_output = Annotated[ + Optional[bool], + typer.Option("--table", help="Output the response as a table", show_default=False) + ] + csv_output = Annotated[ + Optional[bool], + typer.Option("--csv", help="Output the response as CSV", show_default=False) + ] + columns = Annotated[ + Optional[List[str]], + typer.Option("--columns", "-C", help="Columns to include in the table, in the format HeaderName=jsonpath", show_default=False) + ] + filter = Annotated[ + Optional[List[str]], + typer.Option("--filter", "-F", help="Filters to apply on rows, in the format jsonpath=regex", show_default=False) + ] + page = Annotated[ + Optional[int], + typer.Option("--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages", 
show_default=False) + ] + page_size = Annotated[ + Optional[int], + typer.Option("--page-size", "-z", help="Page size for results", show_default=False) + ] + + +class CatalogCommandOptions: + include_archived = Annotated[ + Optional[bool], + typer.Option("--include-archived", "-a", help="Include archived entities", show_default=False) + ] + hierarchy_depth = Annotated[ + Optional[str], + typer.Option("--hierarchy-depth", "-d", help="Depth of the parent / children hierarchy nodes. Can be 'full' or a valid integer", show_default=False) + ] + groups = Annotated[ + Optional[str], + typer.Option("--groups", "-g", help="Filter based on groups, which correspond to the x-cortex-groups field in the Catalog Descriptor. Accepts a comma-delimited list of groups", show_default=False) + ] + owners = Annotated[ + Optional[str], + typer.Option("--owners", "-o", help="Filter based on owner group names, which correspond to the x-cortex-owners field in the Catalog Descriptor. Accepts a comma-delimited list of owner group names", show_default=False) + ] + include_hierarchy_fields = Annotated[ + Optional[str], + typer.Option("--include-hierarchy-fields", "-i", help="List of sub fields to include for hierarchies. 
Only supports 'groups'", show_default=False) + ] + include_nested_fields = Annotated[ + Optional[str], + typer.Option("--include-nested-fields", "-in", help="List of sub fields to include for different types, for example team:members", show_default=False) + ] + include_owners = Annotated[ + Optional[bool], + typer.Option("--include-owners", "-io", help="Include ownership information for each entity in the response", show_default=False) + ] + include_links = Annotated[ + Optional[bool], + typer.Option("--include-links", "-l", help="Include links for each entity in the response", show_default=False) + ] + include_metadata = Annotated[ + Optional[bool], + typer.Option("--include-metadata", "-m", help="Include custom data for each entity in the response", show_default=False) + ] + git_repositories = Annotated[ + Optional[str], + typer.Option("--git-repositories", "-r", help="Supports only GitHub repositories in the org/repo format", show_default=False) + ] + types = Annotated[ + Optional[str], + typer.Option("--types", "-t", help="Filter the response to specific types of entities. By default, this includes services, resources, and domains. Corresponds to the x-cortex-type field in the Entity Descriptor.", show_default=False) + ] + +@app.command(name="list") +def catalog_list( ctx: typer.Context, - include_archived: bool = typer.Option(False, "--include-archived", "-a", help="Include archived entities"), - hierarchy_depth: str = typer.Option('full', "--hierarchy-depth", "-d", help="Depth of the parent / children hierarchy nodes. Can be 'full' or a valid integer"), - groups: str = typer.Option(None, "--groups", "-g", help="Filter based on groups, which correspond to the x-cortex-groups field in the Catalog Descriptor. Accepts a comma-delimited list of groups"), - owners: str = typer.Option(None, "--owners", "-o", help="Filter based on owner group names, which correspond to the x-cortex-owners field in the Catalog Descriptor. 
Accepts a comma-delimited list of owner group names"), - include_hierarchy_fields: str = typer.Option(None, "--include-hierarchy-fields", "-i", help="List of sub fields to include for hierarchies. Only supports 'groups'"), - include_nested_fields: str = typer.Option(None, "--include-nested-fields", "-in", help="List of sub fields to include for different types, for example team:members"), - include_owners: bool = typer.Option(False, "--include-owners", "-io", help="Include ownership information for each entity in the response"), - include_links: bool = typer.Option(False, "--include-links", "-l", help="Include links for each entity in the response"), - include_metadata: bool = typer.Option(False, "--include-metadata", "-m", help="Include custom data for each entity in the response"), - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), - git_repositories: str = typer.Option(None, "--git-repositories", "-r", help="Supports only GitHub repositories in the org/repo format"), - types: str = typer.Option(None, "--types", "-t", help="Filter the response to specific types of entities. By default, this includes services, resources, and domains. 
Corresponds to the x-cortex-type field in the Entity Descriptor."), + include_archived: CatalogCommandOptions.include_archived = False, + hierarchy_depth: CatalogCommandOptions.hierarchy_depth = 'full', + groups: CatalogCommandOptions.groups = None, + owners: CatalogCommandOptions.owners = None, + include_hierarchy_fields: CatalogCommandOptions.include_hierarchy_fields = None, + include_nested_fields: CatalogCommandOptions.include_nested_fields = None, + include_owners: CatalogCommandOptions.include_owners = False, + include_links: CatalogCommandOptions.include_links = False, + include_metadata: CatalogCommandOptions.include_metadata = False, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + git_repositories: CatalogCommandOptions.git_repositories = None, + types: CatalogCommandOptions.types = None, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filter = [], ): client = ctx.obj["client"] + if table_output and csv_output: + raise typer.BadParameter("Only one of --table and --csv can be specified") + + if (table_output or csv_output) and not columns: + columns = [ + "ID=id", + "Tag=tag", + "Name=name", + "Type=type", + "Git Repository=git.repository", + ] + + output_format = "table" if table_output else "csv" if csv_output else "json" + params = { "includeArchived": include_archived, "hierarchyDepth": hierarchy_depth, @@ -38,7 +131,7 @@ def list( "gitRepositories": git_repositories, "types": types, } - + # remove any params that are None params = {k: v for k, v in params.items() if v is not None} @@ -50,10 +143,9 @@ def list( if page is None: # if page is not specified, we want to fetch all pages r = client.fetch("api/v1/catalog", params=params) - pass else: # if page is specified, we want to fetch only that page r = client.get("api/v1/catalog", params=params) - pass - print_json(data=r) + 
data = r if output_format == 'json' else r.get('entities', []) + print_output(data=data, columns=columns, filters=filters, output_format=output_format) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index c349796..c74ae4b 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -8,39 +8,32 @@ def __init__(self, api_key, base_url='https://api.getcortexapp.com'): self.api_key = api_key self.base_url = base_url - # Might need to rethink logic. Not all gets have an indicator as the - # last path element, ie custom-metrics. - def data_key_for_endpoint(self, endpoint): - api_paths = endpoint.split('/') - if "eng-intel" in api_paths: - return 'data' - - #end_endpoint = endpoint.split('/')[-1] - end_endpoint = api_paths[-1] - match end_endpoint: - # https://api.getcortexapp.com/api/v1/catalog - case 'catalog': - return 'entities' - # https://api.getcortexapp.com/api/v1/audit-logs - case 'audit-logs': - return 'logs' - # https://api.getcortexapp.com/api/v1/catalog/:tagOrId/deploys - case 'deploys': - return 'deployments' - # https://api.getcortexapp.com/api/v1/catalog/:tagOrId/custom-data - case 'custom-data': - return '' - # https://api.getcortexapp.com/api/v1/catalog/:tagOrId/custom-events - case 'custom-events': - return 'events' - # https://api.getcortexapp.com/api/v1/eng-intel/custom-metrics/:customMetricKey/entity/:tagOrId - case 'custom-metrics': - return 'data' - # https://api.getcortexapp.com/api/v1/catalog/:callerTag/dependencies - case 'dependencies': - return 'dependencies' - case _: - return end_endpoint + def guess_data_key(self, response: list | dict): + """ + Guess the key of the data list in a paginated response. + + Args: + response (list or dict): The response to guess the data key from. + + Returns: + The key of the data list in the response. 
+ """ + if isinstance(response, list): + # if the response is a list, there is no data key + return '' + if isinstance(response, dict): + # if the response is a dict, it should have exactly one key whose value is a list + data_keys = [k for k, v in response.items() if isinstance(v, list)] + if len(data_keys) == 0: + # if no such key is found, raise an error + raise ValueError(f"Response dict does not contain a list: {response}") + if len(data_keys) > 1: + # if more than one such key is found, raise an error + raise ValueError(f"Response dict contains multiple lists: {response}") + return data_keys[0] + + # if the response is neither a list nor a dict, raise an error + raise ValueError(f"Response is not a list or dict: {response}") def request(self, method, endpoint, params={}, headers={}, data=None, raw_body=False, raw_response=False, content_type='application/json'): req_headers = { @@ -101,20 +94,35 @@ def fetch(self, endpoint, params={}, headers={}): # param page is page number, param pageSize is page size, default 250 page = 0 page_size = 250 - data_key = self.data_key_for_endpoint(endpoint) + data_key = None data = [] while True: response = self.get(endpoint, params={**params, 'page': page, 'pageSize': page_size}, headers=headers) - # Some endpoints just return an array as the root element. + if not (isinstance(response, dict) or isinstance(response, list)): + # something is terribly wrong; this is definitely not a paginated response + break + + if data_key is None: + # first page, guess the data key + data_key = self.guess_data_key(response) + + # Some endpoints just return an array as the root element. 
In those cases, data_key is '' if data_key == '': + # if the data key is empty, the response is a list; an empty list means no more data + if len(response) == 0: + break data.extend(response) else: if data_key not in response or not response[data_key]: break data.extend(response[data_key]) - if response['totalPages'] == page + 1: - break + if response['totalPages'] == page + 1: + break page += 1 + + if data_key == '': + return data + return { "total": len(data), "page": 0, diff --git a/cortexapps_cli/utils.py b/cortexapps_cli/utils.py new file mode 100644 index 0000000..e026b26 --- /dev/null +++ b/cortexapps_cli/utils.py @@ -0,0 +1,135 @@ +import csv +import json +import re +import sys +import typer + +from rich import print_json +from rich.table import Table +from rich.console import Console + +def get_value_at_path(data, path): + """ + Get the value at a specified path in a nested dictionary. + + Args: + data (dict): The input dictionary. + path (str): The path to the desired value, separated by dots. + + Returns: + The value at the specified path or None if the path doesn't exist. + """ + keys = path.split(".") + current = data + + try: + for key in keys: + if isinstance(current, dict): + current = current.get(key) + elif isinstance(current, list): + key = int(key) + current = current[key] + else: + return None + return current + except: + return None + +def matches_filters(data, filters): + """ + Check if a dictionary matches a list of filters. + + Args: + data (dict): The dictionary to check. + filters (list): A list of filters in the format jsonpath=regex. + + Returns: + True if the dictionary matches all filters, False otherwise. + """ + if not filters: + return True + for filter in filters: + jsonpath, regex = filter.split("=") + value = get_value_at_path(data, jsonpath) + if value is None: + return False + if not re.match(regex, str(value)): + return False + return True + +def humanize_value(value): + """ + Convert a value to a human-readable string. 
+ + Args: + value: The value to convert. + + Returns: + A human-readable string representation of the value. + """ + if value is None: + return "" + if isinstance(value, list): + return ', '.join([str(x) for x in value]) + if isinstance(value, dict): + return json.dumps(value, indent=2) + return str(value) + +def print_output(data, columns=None, filters=None, output_format='json'): + """ + Print output in the specified format. + + Args: + data: The data to print. + columns: A list of columns to include in the output. + filters: A list of filters to apply to the data. + output_format: The format to print the data in. + """ + + if not output_format in ['json', 'table', 'csv']: + raise ValueError("Invalid output format. Must be one of: json, table, csv") + + if output_format == 'json': + if columns: + raise typer.BadParameter("Columns can only be specified when using --table or --csv") + if filters: + raise typer.BadParameter("Filters can only be specified when using --table or --csv") + print_json(data=data) + return + + if not columns: + raise typer.BadParameter("Columns must be specified when using --table or --csv") + + for idx, column in enumerate(columns): + if not re.match(r"^[a-zA-Z0-9_. 
]+=[a-zA-Z0-9_.]+$", column): + if re.match(r"^[a-zA-Z0-9_.]+$", column): + # if no column name is specified and it's a valid jsonpath, use the jsonpath as the column name + columns[idx] = f"{column}={column}" + else: + raise typer.BadParameter("Columns must be in the format HeaderName=jsonpath") + + if filters: + for filter in filters: + if not re.match(r"^[a-zA-Z0-9_.]+=.+$", filter): + raise typer.BadParameter("Filters must be in the format jsonpath=regex") + + column_headers = [x.split('=')[0] for x in columns] + column_accessors = [x.split('=')[1] for x in columns] + rows = [] + + for item in data: + if matches_filters(item, filters): + rows.append([humanize_value(get_value_at_path(item, accessor)) for accessor in column_accessors]) + + if output_format == 'table': + table = Table() + for header in column_headers: + table.add_column(header) + for row in rows: + table.add_row(*row) + console = Console() + console.print(table) + elif output_format == 'csv': + csv_writer = csv.writer(sys.stdout) + csv_writer.writerow(column_headers) + csv_writer.writerows(rows) From 0c38697f0615d665b1c203e0461fad785e6fdfa4 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Tue, 8 Oct 2024 14:08:18 -0700 Subject: [PATCH 15/56] Modify deploys tests to support typer --- cortexapps_cli/commands/deploys.py | 43 +++++++++++----- tests/test_deploys.py | 83 ++++++++++++++++++++---------- 2 files changed, 86 insertions(+), 40 deletions(-) diff --git a/cortexapps_cli/commands/deploys.py b/cortexapps_cli/commands/deploys.py index 253c6ab..0a0424e 100644 --- a/cortexapps_cli/commands/deploys.py +++ b/cortexapps_cli/commands/deploys.py @@ -72,6 +72,9 @@ def delete_all( def delete( ctx: typer.Context, tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + sha: str = typer.Option(None, "--sha", "-s", help="The Secure Hash Algorithm (SHA) of the deploy"), + environment: str = typer.Option(None, "--environment", "-e", 
help="The name of the environment"), + type: Type = typer.Option(None, "--type", "-ty", help="The type of the deploy"), ): """ Delete deployments for entity @@ -79,7 +82,16 @@ def delete( client = ctx.obj["client"] - client.delete("api/v1/catalog/" + tag + "/deploys") + params = { + "environment": environment, + "sha": sha, + "type": type + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client.delete("api/v1/catalog/" + tag + "/deploys", params=params) # 'list' is a keyword in python; naming the function 'list' will cause problems like this: # TypeError: 'function' object is not subscriptable @@ -129,8 +141,8 @@ def add( sha: str = typer.Option(None, "--sha", "-s", help="The Secure Hash Algorithm (SHA) of the deploy"), tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of the deploy", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), - title: str = typer.Option(..., "--title", "-ti", help="The title of the deploy"), - type: Type = typer.Option(..., "--type", "-ty", help="The type of the deploy"), + title: str = typer.Option(None, "--title", "-ti", help="The title of the deploy"), + type: Type = typer.Option(None, "--type", "-ty", help="The type of the deploy"), url: str = typer.Option(None, "--url", "-u", help="The Uniform Resource Locator(URL) of the deploy") ): """ @@ -140,23 +152,24 @@ def add( client = ctx.obj["client"] if file_input: - if customData or email or environment or name or sha or timestamp or title or type or url: + if email or environment or name or sha or title or type or url: raise typer.BadParameter("When providing a deploy input file, do not specify any other deploy event attributes") data = json.loads("".join([line for line in file_input])) else: data = { - "timestamp": timestamp, - "title": title, - "type": type.value, - 
"sha": sha, - "environment": environment, + "customData": { + }, "deployer": { "email": email, "name": name }, - "customData": { - } + "environment": environment, + "sha": sha, + "timestamp": timestamp, + "title": title, + "type": type.value, + "url": url } if customData: @@ -191,8 +204,8 @@ def update_by_uuid( sha: str = typer.Option(None, "--sha", "-s", help="The Secure Hash Algorithm (SHA) of the deploy"), tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of the deploy", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), - title: str = typer.Option(..., "--title", "-ti", help="The title of the deploy"), - deploy_type: Type = typer.Option(..., "--type", "-ty", help="The type of the deploy"), + title: str = typer.Option(None, "--title", "-ti", help="The title of the deploy"), + deploy_type: Type = typer.Option(None, "--type", "-ty", help="The type of the deploy"), url: str = typer.Option(None, "--url", "-u", help="The Uniform Resource Locator(URL) of the deploy"), uuid: str = typer.Option(..., "--uuid", "-uu", help="The Universally Unique Identifier (UUID) of the deploy") ): @@ -203,11 +216,13 @@ def update_by_uuid( client = ctx.obj["client"] if file_input: - if customData or email or environment or name or sha or timestamp or title or deploy_type or url: + if customData or email or environment or name or sha or title or deploy_type or url: raise typer.BadParameter("When providing a deploy input file, do not specify any other deploy event attributes") data = json.loads("".join([line for line in file_input])) else: + if not title or tag or deploy_type: + raise typer.BadParameter("When not providing a deploy input file, title and tag are required") data = { "environment": environment, "sha": sha, diff --git a/tests/test_deploys.py b/tests/test_deploys.py index 1913148..b8c21b1 100644 --- 
a/tests/test_deploys.py +++ b/tests/test_deploys.py @@ -1,44 +1,75 @@ -from common import * +from typer.testing import CliRunner +import json -def _add_deploy(capsys): - cli_command(capsys, ["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys.json"]) +from cortexapps_cli.cli import app -def _delete_all(capsys): - cli_command(capsys, ["deploys", "delete-all"]) - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) +runner = CliRunner() + +def _json_response(arr): + response = runner.invoke(app, arr) + return json.loads(response.stdout) + +def _add_deploy(): + runner.invoke(app, ["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys.json"]) + +def _delete_all(): + runner.invoke(app, ["deploys", "delete-all"]) + response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) assert len(response['deployments']) == 0, "All deployments for entity should be deleted" -def test_deploys(capsys): - _delete_all(capsys) +def test_deploys(): + _delete_all() - response = cli_command(capsys, ["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys-uuid.json"]) + response = _json_response(["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys-uuid.json"]) uuid = response['uuid'] - # Add a second deploy. 
- _add_deploy(capsys) - - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) + _add_deploy() + response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) assert any(deploy['uuid'] == uuid for deploy in response['deployments']), "Should find a deploy with uuid" assert response['total'] == 2, "Two deploys should be returned for entity" - cli_command(capsys, ["deploys", "update-by-uuid", "-t", "shipping-integrations", "-u", uuid, "-f", "data/run-time/deploys-update.json"]) - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) + runner.invoke(app, ["deploys", "update-by-uuid", "-t", "shipping-integrations", "-uu", uuid, "-f", "data/run-time/deploys-update.json"]) + response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) deploy = [deploy for deploy in response['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-456789", "Should find a deploy with sha" - - cli_command(capsys, ["deploys", "delete-by-uuid", "-t", "shipping-integrations", "-u", uuid]) - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) + + runner.invoke(app, ["deploys", "delete-by-uuid", "-t", "shipping-integrations", "-uu", uuid]) + response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) assert not any(deploy['uuid'] == uuid for deploy in response['deployments']), "Should not find a deploy with uuid" assert response['total'] == 1, "Following delete-by-uuid, only one deploy should be returned for entity" - _add_deploy(capsys) - cli_command(capsys, ["deploys", "delete", "-t", "shipping-integrations", "-s", "SHA-123456"]) - response = cli_command(capsys, ["deploys", "list", "-t", "shipping-integrations"]) + _add_deploy() + runner.invoke(app, ["deploys", "delete", "-t", "shipping-integrations", "-s", "SHA-123456"]) + response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) assert not any(deploy['sha'] == 
"SHA-123456" for deploy in response['deployments']), "Should not find a deploy with sha that was deleted" - _add_deploy(capsys) - cli_command(capsys, ["deploys", "delete-filter", "-y", "DEPLOY"]) - assert not any(deploy['type'] == "DEPLOY" for deploy in response['deployments']), "Should not find a deploy type 'DEPLOY' sha that was deleted" + _add_deploy() + runner.invoke(app, ["deploys", "delete-by-filter", "-ty", "DEPLOY"]) + response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) + assert not any(deploy['type'] == "DEPLOY" for deploy in response['deployments']), "Should not find a deploy type 'DEPLOY' that was deleted" + + response = _json_response(["deploys", "add", + "-t", "shipping-integrations", + "--email", "julien@tpb.com", + "--name", "Julien", + "--environment", "PYPI.org", + "--sha", "SHA-123456", + "--title", "my title", + "--type", "DEPLOY", + "--url", "https://tpb.com", + "-c", "abc=123", + "-c", "def=456"]) + uuid = response['uuid'] + response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) + deploy = [deploy for deploy in response['deployments'] if deploy['uuid'] == uuid] + assert deploy[0]['sha'] == "SHA-123456", "Should find a deploy with sha" + assert deploy[0]['deployer']['email'] == "julien@tpb.com", "Email should be set for deploy" + assert deploy[0]['deployer']['name'] == "Julien", "Name should be set for deploy" + assert deploy[0]['environment'] == "PYPI.org", "environment should be set for deploy" + assert deploy[0]['title'] == "my title", "title should be set for deploy" + assert deploy[0]['type'] == "DEPLOY", "type should be set for deploy" + assert deploy[0]['customData']['abc'] == "123", "Custom data field should be populated" + assert deploy[0]['customData']['def'] == "456", "Custom data field should be populated" - _add_deploy(capsys) - _delete_all(capsys) + _add_deploy() + _delete_all() From d16568645377422d4a7eded1b71fbfbb13b00bb8 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Tue, 8 
Oct 2024 16:49:46 -0700 Subject: [PATCH 16/56] Add tests --- cortexapps_cli/commands/catalog.py | 42 ++++++++++++- cortexapps_cli/commands/custom_metrics.py | 8 +-- tests/helpers/utils.py | 24 ++++++++ tests/test_audit_logs.py | 6 +- tests/test_audit_logs_dates.py | 8 +-- tests/test_audit_logs_end_date.py | 6 +- tests/test_audit_logs_page.py | 6 +- tests/test_audit_logs_size.py | 6 +- tests/test_audit_logs_start_date.py | 6 +- ...st_custom_data_create_or_update_in_bulk.py | 11 ++-- tests/test_custom_data_delete.py | 12 ++-- tests/test_custom_data_list.py | 6 +- tests/test_custom_metrics.py | 13 ++++ tests/test_dependencies.py | 28 ++++----- tests/test_deploys.py | 61 ++++++++----------- 15 files changed, 155 insertions(+), 88 deletions(-) create mode 100644 tests/helpers/utils.py create mode 100644 tests/test_custom_metrics.py diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index 430bb94..0bac73d 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -76,7 +76,7 @@ class CatalogCommandOptions: ] types = Annotated[ Optional[str], - typer.Option("--types", "-t", help="Filter the response to specific types of entities. By default, this includes services, resources, and domains. Corresponds to the x-cortex-type field in the Entity Descriptor.", show_default=False) + typer.Option("--types", "-ty", help="Filter the response to specific types of entities. By default, this includes services, resources, and domains. 
Corresponds to the x-cortex-type field in the Entity Descriptor.", show_default=False) ] @app.command(name="list") @@ -149,3 +149,43 @@ def catalog_list( data = r if output_format == 'json' else r.get('entities', []) print_output(data=data, columns=columns, filters=filters, output_format=output_format) + +@app.command() +def details( + ctx: typer.Context, + hierarchy_depth: CatalogCommandOptions.hierarchy_depth = 'full', + include_hierarchy_fields: CatalogCommandOptions.include_hierarchy_fields = None, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filter = [], +): + client = ctx.obj["client"] + + if table_output and csv_output: + raise typer.BadParameter("Only one of --table and --csv can be specified") + + if (table_output or csv_output) and not columns: + columns = [ + "ID=id", + "Tag=tag", + "Name=name", + "Type=type", + "Git Repository=git.repository", + ] + + output_format = "table" if table_output else "csv" if csv_output else "json" + + params = { + "hierarchyDepth": hierarchy_depth, + "includeHierarchyFields": include_hierarchy_fields + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("api/v1/catalog/" + tag, params=params) + + data = r if output_format == 'json' else [r] + print_output(data=data, columns=columns, filters=filters, output_format=output_format) diff --git a/cortexapps_cli/commands/custom_metrics.py b/cortexapps_cli/commands/custom_metrics.py index db92759..426d12f 100644 --- a/cortexapps_cli/commands/custom_metrics.py +++ b/cortexapps_cli/commands/custom_metrics.py @@ -81,10 +81,10 @@ def add( data = { "timestamp": timestamp, - "vaue": value + "value": value } - params = _convert_datetime_to_string(params) + 
data = _convert_datetime_to_string(data) r = client.post("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, data=data) @@ -93,7 +93,7 @@ def add_in_bulk( ctx: typer.Context, custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None, - series: list[str] | None = typer.Option(None, "--custom", "-c", callback=_parse_key_value, help="List of timestamp=value pairs."), + series: list[str] | None = typer.Option(None, "--value", "-v", callback=_parse_key_value, help="List of timestamp=value pairs."), tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), ): """ @@ -124,7 +124,7 @@ def delete( custom_metric_key: str = typer.Option(..., "--custom-metric-key", "-k", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), start_date: datetime = typer.Option(None, "--start-date", "-s", help="Start date for the deletion (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), - end_date: datetime = typer.Option(None, "--end-date", "-s", help="End date for the deletion (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + end_date: datetime = typer.Option(None, "--end-date", "-e", help="End date for the deletion (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), ): """ Delete custom metric data points for entity diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py new file mode 100644 index 0000000..ca3f029 --- /dev/null +++ b/tests/helpers/utils.py @@ -0,0 +1,24 @@ +from cortexapps_cli.cli import app +from datetime import datetime +from 
datetime import timedelta +from datetime import timezone +import json +from typer.testing import CliRunner + +runner = CliRunner() + +def today(): + return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S") + +def yesterday(): + today = datetime.now(timezone.utc) + yesterday = today - timedelta(days = 1) + return yesterday.strftime("%Y-%m-%dT%H:%M:%S") + +def json_response(params): + print("params = " + str(params)) + response = runner.invoke(app, params) + return json.loads(response.stdout) + +def cli(params): + runner.invoke(app, params) diff --git a/tests/test_audit_logs.py b/tests/test_audit_logs.py index c3da846..7d86a75 100644 --- a/tests/test_audit_logs.py +++ b/tests/test_audit_logs.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["audit-logs", "get",]) +def test(): + response = json_response(["audit-logs", "get"]) assert (len(response['logs']) > 0) diff --git a/tests/test_audit_logs_dates.py b/tests/test_audit_logs_dates.py index 7d5b8c1..90c4eca 100644 --- a/tests/test_audit_logs_dates.py +++ b/tests/test_audit_logs_dates.py @@ -1,7 +1,7 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - start_date = yesterday() +def test(): end_date = today() - response = cli_command(capsys, ["audit-logs", "get", "-s", start_date, "-e", end_date]) + start_date = yesterday() + response = json_response(["audit-logs", "get", "-s", start_date, "-e", end_date]) assert (len(response['logs']) > 0) diff --git a/tests/test_audit_logs_end_date.py b/tests/test_audit_logs_end_date.py index 3e51fd5..c5ab52d 100644 --- a/tests/test_audit_logs_end_date.py +++ b/tests/test_audit_logs_end_date.py @@ -1,6 +1,6 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): +def test(): end_date = today() - response = cli_command(capsys, ["audit-logs", "get", "-e", end_date]) + response = json_response(["audit-logs", "get", "-e", end_date]) assert 
(len(response['logs']) > 0) diff --git a/tests/test_audit_logs_page.py b/tests/test_audit_logs_page.py index e9a8aad..c47e476 100644 --- a/tests/test_audit_logs_page.py +++ b/tests/test_audit_logs_page.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["audit-logs", "get", "-p", "0",]) +def test(): + response = json_response(["audit-logs", "get", "-p", "0"]) assert (len(response['logs']) > 0) diff --git a/tests/test_audit_logs_size.py b/tests/test_audit_logs_size.py index baa7b01..6d975da 100644 --- a/tests/test_audit_logs_size.py +++ b/tests/test_audit_logs_size.py @@ -1,5 +1,5 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["audit-logs", "get", "-z", "1"]) +def test(): + response = json_response(["audit-logs", "get", "-p", "0", "-z", "1"]) assert (len(response['logs']) == 1) diff --git a/tests/test_audit_logs_start_date.py b/tests/test_audit_logs_start_date.py index af27a64..0b73cf2 100644 --- a/tests/test_audit_logs_start_date.py +++ b/tests/test_audit_logs_start_date.py @@ -1,6 +1,6 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): +def test(): start_date = yesterday() - response = cli_command(capsys, ["audit-logs", "get", "-s", start_date]) + response = json_response(["audit-logs", "get", "-s", start_date]) assert (len(response['logs']) > 0) diff --git a/tests/test_custom_data_create_or_update_in_bulk.py b/tests/test_custom_data_create_or_update_in_bulk.py index 75653b8..efeb8cb 100644 --- a/tests/test_custom_data_create_or_update_in_bulk.py +++ b/tests/test_custom_data_create_or_update_in_bulk.py @@ -1,13 +1,12 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli(["-q", "custom-data", "bulk", "-f", "data/run-time/custom-data-bulk.json"]) - capsys.readouterr() +def test(): + cli(["custom-data", "bulk", "-f", "data/run-time/custom-data-bulk.json"]) - response = 
cli_command(capsys, ["catalog", "details", "-t", "backend-worker"]) + response = json_response(["catalog", "details", "-t", "backend-worker"]) list = [metadata for metadata in response['metadata'] if metadata['key'] == "bulk-key-1"] assert list[0]['value'] == "value-1" - response = cli_command(capsys, ["catalog", "details", "-t", "ach-payments-nacha"]) + response = json_response( ["catalog", "details", "-t", "ach-payments-nacha"]) list = [metadata for metadata in response['metadata'] if metadata['key'] == "bulk-key-4"] assert list[0]['value'] == "value-4" diff --git a/tests/test_custom_data_delete.py b/tests/test_custom_data_delete.py index 98ecae9..e338206 100644 --- a/tests/test_custom_data_delete.py +++ b/tests/test_custom_data_delete.py @@ -1,12 +1,12 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli_command(capsys, ["custom-data", "add", "-t", "recommendations", "-f", "data/run-time/custom-data-delete.json"]) +def test(): + cli(["custom-data", "add", "-t", "recommendations", "-f", "data/run-time/custom-data-delete.json"]) - response = cli_command(capsys, ["custom-data", "get", "-t", "recommendations", "-k", "delete-me"]) + response = json_response(["custom-data", "get", "-t", "recommendations", "-k", "delete-me"]) assert response['value'] == "yes" - cli(["-q", "custom-data", "delete", "-t", "recommendations", "-k", "delete-me"]) + cli(["custom-data", "delete", "-t", "recommendations", "-k", "delete-me"]) - response = cli_command(capsys, ["catalog", "details", "-t", "recommendations"]) + response = json_response(["catalog", "details", "-t", "recommendations"]) assert not any(metadata['key'] == 'delete-me' for metadata in response['metadata']) diff --git a/tests/test_custom_data_list.py b/tests/test_custom_data_list.py index 259fd49..ae15abc 100644 --- a/tests/test_custom_data_list.py +++ b/tests/test_custom_data_list.py @@ -1,6 +1,6 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = 
cli_command(capsys, ["catalog", "details", "-t", "backend-worker"]) +def test(): + response = json_response(["catalog", "details", "-t", "backend-worker"]) list = [metadata for metadata in response['metadata'] if metadata['key'] == "cicd"] assert list[0]['value'] == "circle-ci" diff --git a/tests/test_custom_metrics.py b/tests/test_custom_metrics.py new file mode 100644 index 0000000..840e17b --- /dev/null +++ b/tests/test_custom_metrics.py @@ -0,0 +1,13 @@ +from tests.helpers.utils import * + +def test(): + cli(["custom-metrics", "delete", "-t", "shipping-integrations", "-k", "vulnerabilities", "-s", "2022-01-01T00:00:00", "-e", today()]) + cli(["custom-metrics", "add", "-t", "shipping-integrations", "-k", "vulnerabilities", "-v", "3.0"]) + response = json_response(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) + assert response['data'][0]['value'] == 3.0, "should have single value of 3.0" + + cli(["custom-metrics", "add-in-bulk", "-t", "shipping-integrations", "-k", "vulnerabilities", "-v", "2024-07-01T00:00:00=1.0", "-v", "2024-08-01T00:00:00=2.0"]) + response = json_response(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) + assert response['total'] == 3, "should have total of 3 metrics data points" + print("There is not a good way to test this today because there is a pre-requisite that the custom metric already exists.") + print("If you manually create the custom metric named 'vulnerabilities' you can run these tests.") diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py index d59f863..a475a07 100644 --- a/tests/test_dependencies.py +++ b/tests/test_dependencies.py @@ -1,26 +1,26 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): +def test(): callerTag = "fraud-analyzer" calleeTag = "backend-worker" - cli(["-q", "dependencies", "delete-all", "-r", callerTag]) + cli(["dependencies", "delete-all", "-r", callerTag]) - cli_command(capsys, 
["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) + cli(["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) - cli_command(capsys, ["dependencies", "add", "-r", callerTag, "-e", - calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies.json"]) - cli_command, (["dependencies", "update", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies-update.json"]) - response = cli_command(capsys, ["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) + cli(["dependencies", "add", "-r", callerTag, "-e", + calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies.json"]) + cli(["dependencies", "update", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies-update.json"]) + response = json_response(["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) assert response["callerTag"] == callerTag, "callerTag should be " + callerTag assert response["calleeTag"] == calleeTag, "calleeTag should be " + calleeTag - cli_command(capsys, ["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) + cli(["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) - response = cli_command(capsys, ["dependencies", "get-all", "-r", "fraud-analyzer", "-o"]) + response = json_response(["dependencies", "get-all", "-r", "fraud-analyzer", "-o"]) assert any(dependency['callerTag'] == callerTag and dependency['path'] == "/api/v1/github/configurations" for dependency in response["dependencies"]) - cli(["-q", "dependencies", "delete", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", 
"/api/v1/audit-logs"]) - cli(["-q", "dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) - cli(["-q", "dependencies", "delete-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) - cli(["-q", "dependencies", "delete-all", "-r", "fraud-analyzer"]) + cli(["dependencies", "delete", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/audit-logs"]) + cli(["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) + cli(["dependencies", "delete-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) + cli(["dependencies", "delete-all", "-r", "fraud-analyzer"]) diff --git a/tests/test_deploys.py b/tests/test_deploys.py index b8c21b1..cdaa872 100644 --- a/tests/test_deploys.py +++ b/tests/test_deploys.py @@ -1,66 +1,57 @@ -from typer.testing import CliRunner -import json - -from cortexapps_cli.cli import app - -runner = CliRunner() - -def _json_response(arr): - response = runner.invoke(app, arr) - return json.loads(response.stdout) +from tests.helpers.utils import * def _add_deploy(): - runner.invoke(app, ["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys.json"]) + cli(["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys.json"]) def _delete_all(): - runner.invoke(app, ["deploys", "delete-all"]) - response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) + cli(["deploys", "delete-all"]) + response = json_response(["deploys", "list", "-t", "shipping-integrations"]) assert len(response['deployments']) == 0, "All deployments for entity should be deleted" def test_deploys(): _delete_all() - response = _json_response(["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys-uuid.json"]) + response = json_response(["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys-uuid.json"]) uuid = response['uuid'] _add_deploy() - response = _json_response(["deploys", "list", "-t", 
"shipping-integrations"]) + response = json_response(["deploys", "list", "-t", "shipping-integrations"]) assert any(deploy['uuid'] == uuid for deploy in response['deployments']), "Should find a deploy with uuid" assert response['total'] == 2, "Two deploys should be returned for entity" - runner.invoke(app, ["deploys", "update-by-uuid", "-t", "shipping-integrations", "-uu", uuid, "-f", "data/run-time/deploys-update.json"]) - response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) + cli(["deploys", "update-by-uuid", "-t", "shipping-integrations", "-uu", uuid, "-f", "data/run-time/deploys-update.json"]) + response = json_response(["deploys", "list", "-t", "shipping-integrations"]) deploy = [deploy for deploy in response['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-456789", "Should find a deploy with sha" - runner.invoke(app, ["deploys", "delete-by-uuid", "-t", "shipping-integrations", "-uu", uuid]) - response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) + cli(["deploys", "delete-by-uuid", "-t", "shipping-integrations", "-uu", uuid]) + response = json_response(["deploys", "list", "-t", "shipping-integrations"]) assert not any(deploy['uuid'] == uuid for deploy in response['deployments']), "Should not find a deploy with uuid" assert response['total'] == 1, "Following delete-by-uuid, only one deploy should be returned for entity" _add_deploy() - runner.invoke(app, ["deploys", "delete", "-t", "shipping-integrations", "-s", "SHA-123456"]) - response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) + cli(["deploys", "delete", "-t", "shipping-integrations", "-s", "SHA-123456"]) + response = json_response(["deploys", "list", "-t", "shipping-integrations"]) assert not any(deploy['sha'] == "SHA-123456" for deploy in response['deployments']), "Should not find a deploy with sha that was deleted" _add_deploy() - runner.invoke(app, ["deploys", "delete-by-filter", "-ty", "DEPLOY"]) - 
response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) + cli(["deploys", "delete-by-filter", "-ty", "DEPLOY"]) + response = json_response(["deploys", "list", "-t", "shipping-integrations"]) assert not any(deploy['type'] == "DEPLOY" for deploy in response['deployments']), "Should not find a deploy type 'DEPLOY' that was deleted" - response = _json_response(["deploys", "add", - "-t", "shipping-integrations", - "--email", "julien@tpb.com", - "--name", "Julien", - "--environment", "PYPI.org", - "--sha", "SHA-123456", - "--title", "my title", - "--type", "DEPLOY", - "--url", "https://tpb.com", - "-c", "abc=123", - "-c", "def=456"]) + response = json_response(["deploys", "add", + "-t", "shipping-integrations", + "--email", "julien@tpb.com", + "--name", "Julien", + "--environment", "PYPI.org", + "--sha", "SHA-123456", + "--title", "my title", + "--type", "DEPLOY", + "--url", "https://tpb.com", + "-c", "abc=123", + "-c", "def=456"]) uuid = response['uuid'] - response = _json_response(["deploys", "list", "-t", "shipping-integrations"]) + response = json_response(["deploys", "list", "-t", "shipping-integrations"]) deploy = [deploy for deploy in response['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-123456", "Should find a deploy with sha" assert deploy[0]['deployer']['email'] == "julien@tpb.com", "Email should be set for deploy" From 6223ad401b9a11f06c32a5930457f3b7fa690826 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 9 Oct 2024 13:29:46 -0700 Subject: [PATCH 17/56] Add test utils; clean up tests --- cortexapps_cli/commands/custom_events.py | 5 +++ cortexapps_cli/commands/dependencies.py | 1 - data/run-time/custom-events.json | 2 +- {tests => tests.orig2}/__init__.py | 0 {tests => tests.orig2}/common.py | 0 {tests => tests.orig2}/cortex_github.py | 0 {tests => tests.orig2}/feature_flag_check.py | 0 {tests => tests.orig2}/feature_flag_dump.py | 0 {tests => tests.orig2}/github_setup.py | 0 
tests.orig2/test_audit_logs.py | 5 +++ tests.orig2/test_audit_logs_dates.py | 7 ++++ tests.orig2/test_audit_logs_end_date.py | 6 +++ tests.orig2/test_audit_logs_page.py | 5 +++ tests.orig2/test_audit_logs_size.py | 5 +++ tests.orig2/test_audit_logs_start_date.py | 6 +++ .../test_catalog_archive_entity.py | 0 .../test_catalog_create_entity.py | 0 .../test_catalog_create_entity_viewer.py | 0 .../test_catalog_delete_entity.py | 0 .../test_catalog_get_entity_details.py | 0 ...st_catalog_get_entity_details_hierarchy.py | 0 .../test_catalog_include_links.py | 0 .../test_catalog_include_metadata.py | 0 .../test_catalog_include_nested_fields.py | 0 .../test_catalog_invalid_page_size.py | 0 .../test_catalog_list_by_github_repo.py | 0 .../test_catalog_list_by_group_multiple.py | 0 .../test_catalog_list_by_group_single.py | 0 .../test_catalog_list_by_owners_multiple.py | 0 .../test_catalog_list_by_owners_single.py | 0 .../test_catalog_list_by_types.py | 0 .../test_catalog_list_entity_descriptors.py | 0 ...st_catalog_list_entity_descriptors_page.py | 0 ...talog_list_entity_descriptors_page_size.py | 0 ...st_catalog_list_entity_descriptors_yaml.py | 0 .../test_catalog_list_include_archived.py | 0 .../test_catalog_list_include_owners.py | 0 .../test_catalog_list_page.py | 0 .../test_catalog_list_page_size.py | 0 ...test_catalog_retrieve_entity_descriptor.py | 0 ...catalog_retrieve_entity_descriptor_yaml.py | 0 .../test_catalog_retrieve_entity_details.py | 0 ...etrieve_entity_details_hierarchy_fields.py | 0 ...atalog_retrieve_entity_scorecard_scores.py | 0 .../test_catalog_unarchive_entity.py | 0 {tests => tests.orig2}/test_config_file.py | 0 ...st_custom_data_create_or_update_in_bulk.py | 12 ++++++ tests.orig2/test_custom_data_delete.py | 12 ++++++ tests.orig2/test_custom_data_list.py | 6 +++ tests.orig2/test_custom_events_list.py | 15 +++++++ tests.orig2/test_custom_events_uuid.py | 26 ++++++++++++ {tests => tests.orig2}/test_docs.py | 0 {tests => 
tests.orig2}/test_entity_types.py | 0 {tests => tests.orig2}/test_github.py | 0 {tests => tests.orig2}/test_groups.py | 0 {tests => tests.orig2}/test_ip_allowlist.py | 0 {tests => tests.orig2}/test_packages.py | 0 {tests => tests.orig2}/test_packages_java.py | 0 {tests => tests.orig2}/test_plugins.py | 0 .../test_plugins_invalid.py | 0 {tests => tests.orig2}/test_scorecards.py | 0 .../test_scorecards_drafts.py | 0 {tests => tests.orig2}/test_teams.py | 0 tests/helpers/utils.py | 30 +++++++++++--- tests/test_audit_logs.py | 4 +- tests/test_audit_logs_dates.py | 4 +- tests/test_audit_logs_end_date.py | 4 +- tests/test_audit_logs_page.py | 4 +- tests/test_audit_logs_size.py | 4 +- tests/test_audit_logs_start_date.py | 4 +- ...st_custom_data_create_or_update_in_bulk.py | 8 ++-- tests/test_custom_data_delete.py | 8 ++-- tests/test_custom_data_list.py | 4 +- tests/test_custom_events_list.py | 21 +++++----- tests/test_custom_events_uuid.py | 34 ++++++++-------- tests/test_custom_metrics.py | 8 ++-- tests/test_dependencies.py | 13 +++--- tests/test_deploys.py | 40 +++++++++---------- 78 files changed, 213 insertions(+), 90 deletions(-) rename {tests => tests.orig2}/__init__.py (100%) rename {tests => tests.orig2}/common.py (100%) rename {tests => tests.orig2}/cortex_github.py (100%) rename {tests => tests.orig2}/feature_flag_check.py (100%) rename {tests => tests.orig2}/feature_flag_dump.py (100%) rename {tests => tests.orig2}/github_setup.py (100%) create mode 100644 tests.orig2/test_audit_logs.py create mode 100644 tests.orig2/test_audit_logs_dates.py create mode 100644 tests.orig2/test_audit_logs_end_date.py create mode 100644 tests.orig2/test_audit_logs_page.py create mode 100644 tests.orig2/test_audit_logs_size.py create mode 100644 tests.orig2/test_audit_logs_start_date.py rename {tests => tests.orig2}/test_catalog_archive_entity.py (100%) rename {tests => tests.orig2}/test_catalog_create_entity.py (100%) rename {tests => 
tests.orig2}/test_catalog_create_entity_viewer.py (100%) rename {tests => tests.orig2}/test_catalog_delete_entity.py (100%) rename {tests => tests.orig2}/test_catalog_get_entity_details.py (100%) rename {tests => tests.orig2}/test_catalog_get_entity_details_hierarchy.py (100%) rename {tests => tests.orig2}/test_catalog_include_links.py (100%) rename {tests => tests.orig2}/test_catalog_include_metadata.py (100%) rename {tests => tests.orig2}/test_catalog_include_nested_fields.py (100%) rename {tests => tests.orig2}/test_catalog_invalid_page_size.py (100%) rename {tests => tests.orig2}/test_catalog_list_by_github_repo.py (100%) rename {tests => tests.orig2}/test_catalog_list_by_group_multiple.py (100%) rename {tests => tests.orig2}/test_catalog_list_by_group_single.py (100%) rename {tests => tests.orig2}/test_catalog_list_by_owners_multiple.py (100%) rename {tests => tests.orig2}/test_catalog_list_by_owners_single.py (100%) rename {tests => tests.orig2}/test_catalog_list_by_types.py (100%) rename {tests => tests.orig2}/test_catalog_list_entity_descriptors.py (100%) rename {tests => tests.orig2}/test_catalog_list_entity_descriptors_page.py (100%) rename {tests => tests.orig2}/test_catalog_list_entity_descriptors_page_size.py (100%) rename {tests => tests.orig2}/test_catalog_list_entity_descriptors_yaml.py (100%) rename {tests => tests.orig2}/test_catalog_list_include_archived.py (100%) rename {tests => tests.orig2}/test_catalog_list_include_owners.py (100%) rename {tests => tests.orig2}/test_catalog_list_page.py (100%) rename {tests => tests.orig2}/test_catalog_list_page_size.py (100%) rename {tests => tests.orig2}/test_catalog_retrieve_entity_descriptor.py (100%) rename {tests => tests.orig2}/test_catalog_retrieve_entity_descriptor_yaml.py (100%) rename {tests => tests.orig2}/test_catalog_retrieve_entity_details.py (100%) rename {tests => tests.orig2}/test_catalog_retrieve_entity_details_hierarchy_fields.py (100%) rename {tests => 
tests.orig2}/test_catalog_retrieve_entity_scorecard_scores.py (100%) rename {tests => tests.orig2}/test_catalog_unarchive_entity.py (100%) rename {tests => tests.orig2}/test_config_file.py (100%) create mode 100644 tests.orig2/test_custom_data_create_or_update_in_bulk.py create mode 100644 tests.orig2/test_custom_data_delete.py create mode 100644 tests.orig2/test_custom_data_list.py create mode 100644 tests.orig2/test_custom_events_list.py create mode 100644 tests.orig2/test_custom_events_uuid.py rename {tests => tests.orig2}/test_docs.py (100%) rename {tests => tests.orig2}/test_entity_types.py (100%) rename {tests => tests.orig2}/test_github.py (100%) rename {tests => tests.orig2}/test_groups.py (100%) rename {tests => tests.orig2}/test_ip_allowlist.py (100%) rename {tests => tests.orig2}/test_packages.py (100%) rename {tests => tests.orig2}/test_packages_java.py (100%) rename {tests => tests.orig2}/test_plugins.py (100%) rename {tests => tests.orig2}/test_plugins_invalid.py (100%) rename {tests => tests.orig2}/test_scorecards.py (100%) rename {tests => tests.orig2}/test_scorecards_drafts.py (100%) rename {tests => tests.orig2}/test_teams.py (100%) diff --git a/cortexapps_cli/commands/custom_events.py b/cortexapps_cli/commands/custom_events.py index dd62df8..7553892 100644 --- a/cortexapps_cli/commands/custom_events.py +++ b/cortexapps_cli/commands/custom_events.py @@ -176,6 +176,11 @@ def list( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} + # convert datetime type to string + for k, v in params.items(): + if str(type(v)) == "": + params[k] = v.strftime('%Y-%m-%dT%H:%M:%S') + if page is None: # if page is not specified, we want to fetch all pages r = client.fetch("api/v1/catalog/" + tag + "/custom-events", params=params) diff --git a/cortexapps_cli/commands/dependencies.py b/cortexapps_cli/commands/dependencies.py index 382939b..62bce2d 100644 --- a/cortexapps_cli/commands/dependencies.py +++ 
b/cortexapps_cli/commands/dependencies.py @@ -38,7 +38,6 @@ def create( client = ctx.obj["client"] - if file_input: if description or metadata or method or path or caller_tag or callee_tag: raise typer.BadParameter("When providing a dependencies input file, do not specify any other dependency event attributes") diff --git a/data/run-time/custom-events.json b/data/run-time/custom-events.json index dfd1948..3cc7847 100644 --- a/data/run-time/custom-events.json +++ b/data/run-time/custom-events.json @@ -4,7 +4,7 @@ "test2": "someVal2" }, "description": "Validate event", - "timestamp": "2023-10-10T13:27:51.226Z", + "timestamp": "2023-10-10T13:27:51", "title": "validate event", "type": "VALIDATE_SERVICE" } diff --git a/tests/__init__.py b/tests.orig2/__init__.py similarity index 100% rename from tests/__init__.py rename to tests.orig2/__init__.py diff --git a/tests/common.py b/tests.orig2/common.py similarity index 100% rename from tests/common.py rename to tests.orig2/common.py diff --git a/tests/cortex_github.py b/tests.orig2/cortex_github.py similarity index 100% rename from tests/cortex_github.py rename to tests.orig2/cortex_github.py diff --git a/tests/feature_flag_check.py b/tests.orig2/feature_flag_check.py similarity index 100% rename from tests/feature_flag_check.py rename to tests.orig2/feature_flag_check.py diff --git a/tests/feature_flag_dump.py b/tests.orig2/feature_flag_dump.py similarity index 100% rename from tests/feature_flag_dump.py rename to tests.orig2/feature_flag_dump.py diff --git a/tests/github_setup.py b/tests.orig2/github_setup.py similarity index 100% rename from tests/github_setup.py rename to tests.orig2/github_setup.py diff --git a/tests.orig2/test_audit_logs.py b/tests.orig2/test_audit_logs.py new file mode 100644 index 0000000..7d86a75 --- /dev/null +++ b/tests.orig2/test_audit_logs.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = json_response(["audit-logs", "get"]) + assert (len(response['logs']) > 0) 
diff --git a/tests.orig2/test_audit_logs_dates.py b/tests.orig2/test_audit_logs_dates.py new file mode 100644 index 0000000..90c4eca --- /dev/null +++ b/tests.orig2/test_audit_logs_dates.py @@ -0,0 +1,7 @@ +from tests.helpers.utils import * + +def test(): + end_date = today() + start_date = yesterday() + response = json_response(["audit-logs", "get", "-s", start_date, "-e", end_date]) + assert (len(response['logs']) > 0) diff --git a/tests.orig2/test_audit_logs_end_date.py b/tests.orig2/test_audit_logs_end_date.py new file mode 100644 index 0000000..c5ab52d --- /dev/null +++ b/tests.orig2/test_audit_logs_end_date.py @@ -0,0 +1,6 @@ +from tests.helpers.utils import * + +def test(): + end_date = today() + response = json_response(["audit-logs", "get", "-e", end_date]) + assert (len(response['logs']) > 0) diff --git a/tests.orig2/test_audit_logs_page.py b/tests.orig2/test_audit_logs_page.py new file mode 100644 index 0000000..c47e476 --- /dev/null +++ b/tests.orig2/test_audit_logs_page.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = json_response(["audit-logs", "get", "-p", "0"]) + assert (len(response['logs']) > 0) diff --git a/tests.orig2/test_audit_logs_size.py b/tests.orig2/test_audit_logs_size.py new file mode 100644 index 0000000..6d975da --- /dev/null +++ b/tests.orig2/test_audit_logs_size.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = json_response(["audit-logs", "get", "-p", "0", "-z", "1"]) + assert (len(response['logs']) == 1) diff --git a/tests.orig2/test_audit_logs_start_date.py b/tests.orig2/test_audit_logs_start_date.py new file mode 100644 index 0000000..0b73cf2 --- /dev/null +++ b/tests.orig2/test_audit_logs_start_date.py @@ -0,0 +1,6 @@ +from tests.helpers.utils import * + +def test(): + start_date = yesterday() + response = json_response(["audit-logs", "get", "-s", start_date]) + assert (len(response['logs']) > 0) diff --git a/tests/test_catalog_archive_entity.py 
b/tests.orig2/test_catalog_archive_entity.py similarity index 100% rename from tests/test_catalog_archive_entity.py rename to tests.orig2/test_catalog_archive_entity.py diff --git a/tests/test_catalog_create_entity.py b/tests.orig2/test_catalog_create_entity.py similarity index 100% rename from tests/test_catalog_create_entity.py rename to tests.orig2/test_catalog_create_entity.py diff --git a/tests/test_catalog_create_entity_viewer.py b/tests.orig2/test_catalog_create_entity_viewer.py similarity index 100% rename from tests/test_catalog_create_entity_viewer.py rename to tests.orig2/test_catalog_create_entity_viewer.py diff --git a/tests/test_catalog_delete_entity.py b/tests.orig2/test_catalog_delete_entity.py similarity index 100% rename from tests/test_catalog_delete_entity.py rename to tests.orig2/test_catalog_delete_entity.py diff --git a/tests/test_catalog_get_entity_details.py b/tests.orig2/test_catalog_get_entity_details.py similarity index 100% rename from tests/test_catalog_get_entity_details.py rename to tests.orig2/test_catalog_get_entity_details.py diff --git a/tests/test_catalog_get_entity_details_hierarchy.py b/tests.orig2/test_catalog_get_entity_details_hierarchy.py similarity index 100% rename from tests/test_catalog_get_entity_details_hierarchy.py rename to tests.orig2/test_catalog_get_entity_details_hierarchy.py diff --git a/tests/test_catalog_include_links.py b/tests.orig2/test_catalog_include_links.py similarity index 100% rename from tests/test_catalog_include_links.py rename to tests.orig2/test_catalog_include_links.py diff --git a/tests/test_catalog_include_metadata.py b/tests.orig2/test_catalog_include_metadata.py similarity index 100% rename from tests/test_catalog_include_metadata.py rename to tests.orig2/test_catalog_include_metadata.py diff --git a/tests/test_catalog_include_nested_fields.py b/tests.orig2/test_catalog_include_nested_fields.py similarity index 100% rename from tests/test_catalog_include_nested_fields.py rename to 
tests.orig2/test_catalog_include_nested_fields.py diff --git a/tests/test_catalog_invalid_page_size.py b/tests.orig2/test_catalog_invalid_page_size.py similarity index 100% rename from tests/test_catalog_invalid_page_size.py rename to tests.orig2/test_catalog_invalid_page_size.py diff --git a/tests/test_catalog_list_by_github_repo.py b/tests.orig2/test_catalog_list_by_github_repo.py similarity index 100% rename from tests/test_catalog_list_by_github_repo.py rename to tests.orig2/test_catalog_list_by_github_repo.py diff --git a/tests/test_catalog_list_by_group_multiple.py b/tests.orig2/test_catalog_list_by_group_multiple.py similarity index 100% rename from tests/test_catalog_list_by_group_multiple.py rename to tests.orig2/test_catalog_list_by_group_multiple.py diff --git a/tests/test_catalog_list_by_group_single.py b/tests.orig2/test_catalog_list_by_group_single.py similarity index 100% rename from tests/test_catalog_list_by_group_single.py rename to tests.orig2/test_catalog_list_by_group_single.py diff --git a/tests/test_catalog_list_by_owners_multiple.py b/tests.orig2/test_catalog_list_by_owners_multiple.py similarity index 100% rename from tests/test_catalog_list_by_owners_multiple.py rename to tests.orig2/test_catalog_list_by_owners_multiple.py diff --git a/tests/test_catalog_list_by_owners_single.py b/tests.orig2/test_catalog_list_by_owners_single.py similarity index 100% rename from tests/test_catalog_list_by_owners_single.py rename to tests.orig2/test_catalog_list_by_owners_single.py diff --git a/tests/test_catalog_list_by_types.py b/tests.orig2/test_catalog_list_by_types.py similarity index 100% rename from tests/test_catalog_list_by_types.py rename to tests.orig2/test_catalog_list_by_types.py diff --git a/tests/test_catalog_list_entity_descriptors.py b/tests.orig2/test_catalog_list_entity_descriptors.py similarity index 100% rename from tests/test_catalog_list_entity_descriptors.py rename to tests.orig2/test_catalog_list_entity_descriptors.py diff --git 
a/tests/test_catalog_list_entity_descriptors_page.py b/tests.orig2/test_catalog_list_entity_descriptors_page.py similarity index 100% rename from tests/test_catalog_list_entity_descriptors_page.py rename to tests.orig2/test_catalog_list_entity_descriptors_page.py diff --git a/tests/test_catalog_list_entity_descriptors_page_size.py b/tests.orig2/test_catalog_list_entity_descriptors_page_size.py similarity index 100% rename from tests/test_catalog_list_entity_descriptors_page_size.py rename to tests.orig2/test_catalog_list_entity_descriptors_page_size.py diff --git a/tests/test_catalog_list_entity_descriptors_yaml.py b/tests.orig2/test_catalog_list_entity_descriptors_yaml.py similarity index 100% rename from tests/test_catalog_list_entity_descriptors_yaml.py rename to tests.orig2/test_catalog_list_entity_descriptors_yaml.py diff --git a/tests/test_catalog_list_include_archived.py b/tests.orig2/test_catalog_list_include_archived.py similarity index 100% rename from tests/test_catalog_list_include_archived.py rename to tests.orig2/test_catalog_list_include_archived.py diff --git a/tests/test_catalog_list_include_owners.py b/tests.orig2/test_catalog_list_include_owners.py similarity index 100% rename from tests/test_catalog_list_include_owners.py rename to tests.orig2/test_catalog_list_include_owners.py diff --git a/tests/test_catalog_list_page.py b/tests.orig2/test_catalog_list_page.py similarity index 100% rename from tests/test_catalog_list_page.py rename to tests.orig2/test_catalog_list_page.py diff --git a/tests/test_catalog_list_page_size.py b/tests.orig2/test_catalog_list_page_size.py similarity index 100% rename from tests/test_catalog_list_page_size.py rename to tests.orig2/test_catalog_list_page_size.py diff --git a/tests/test_catalog_retrieve_entity_descriptor.py b/tests.orig2/test_catalog_retrieve_entity_descriptor.py similarity index 100% rename from tests/test_catalog_retrieve_entity_descriptor.py rename to 
tests.orig2/test_catalog_retrieve_entity_descriptor.py diff --git a/tests/test_catalog_retrieve_entity_descriptor_yaml.py b/tests.orig2/test_catalog_retrieve_entity_descriptor_yaml.py similarity index 100% rename from tests/test_catalog_retrieve_entity_descriptor_yaml.py rename to tests.orig2/test_catalog_retrieve_entity_descriptor_yaml.py diff --git a/tests/test_catalog_retrieve_entity_details.py b/tests.orig2/test_catalog_retrieve_entity_details.py similarity index 100% rename from tests/test_catalog_retrieve_entity_details.py rename to tests.orig2/test_catalog_retrieve_entity_details.py diff --git a/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py b/tests.orig2/test_catalog_retrieve_entity_details_hierarchy_fields.py similarity index 100% rename from tests/test_catalog_retrieve_entity_details_hierarchy_fields.py rename to tests.orig2/test_catalog_retrieve_entity_details_hierarchy_fields.py diff --git a/tests/test_catalog_retrieve_entity_scorecard_scores.py b/tests.orig2/test_catalog_retrieve_entity_scorecard_scores.py similarity index 100% rename from tests/test_catalog_retrieve_entity_scorecard_scores.py rename to tests.orig2/test_catalog_retrieve_entity_scorecard_scores.py diff --git a/tests/test_catalog_unarchive_entity.py b/tests.orig2/test_catalog_unarchive_entity.py similarity index 100% rename from tests/test_catalog_unarchive_entity.py rename to tests.orig2/test_catalog_unarchive_entity.py diff --git a/tests/test_config_file.py b/tests.orig2/test_config_file.py similarity index 100% rename from tests/test_config_file.py rename to tests.orig2/test_config_file.py diff --git a/tests.orig2/test_custom_data_create_or_update_in_bulk.py b/tests.orig2/test_custom_data_create_or_update_in_bulk.py new file mode 100644 index 0000000..efeb8cb --- /dev/null +++ b/tests.orig2/test_custom_data_create_or_update_in_bulk.py @@ -0,0 +1,12 @@ +from tests.helpers.utils import * + +def test(): + cli(["custom-data", "bulk", "-f", 
"data/run-time/custom-data-bulk.json"]) + + response = json_response(["catalog", "details", "-t", "backend-worker"]) + list = [metadata for metadata in response['metadata'] if metadata['key'] == "bulk-key-1"] + assert list[0]['value'] == "value-1" + + response = json_response( ["catalog", "details", "-t", "ach-payments-nacha"]) + list = [metadata for metadata in response['metadata'] if metadata['key'] == "bulk-key-4"] + assert list[0]['value'] == "value-4" diff --git a/tests.orig2/test_custom_data_delete.py b/tests.orig2/test_custom_data_delete.py new file mode 100644 index 0000000..e338206 --- /dev/null +++ b/tests.orig2/test_custom_data_delete.py @@ -0,0 +1,12 @@ +from tests.helpers.utils import * + +def test(): + cli(["custom-data", "add", "-t", "recommendations", "-f", "data/run-time/custom-data-delete.json"]) + + response = json_response(["custom-data", "get", "-t", "recommendations", "-k", "delete-me"]) + assert response['value'] == "yes" + + cli(["custom-data", "delete", "-t", "recommendations", "-k", "delete-me"]) + + response = json_response(["catalog", "details", "-t", "recommendations"]) + assert not any(metadata['key'] == 'delete-me' for metadata in response['metadata']) diff --git a/tests.orig2/test_custom_data_list.py b/tests.orig2/test_custom_data_list.py new file mode 100644 index 0000000..ae15abc --- /dev/null +++ b/tests.orig2/test_custom_data_list.py @@ -0,0 +1,6 @@ +from tests.helpers.utils import * + +def test(): + response = json_response(["catalog", "details", "-t", "backend-worker"]) + list = [metadata for metadata in response['metadata'] if metadata['key'] == "cicd"] + assert list[0]['value'] == "circle-ci" diff --git a/tests.orig2/test_custom_events_list.py b/tests.orig2/test_custom_events_list.py new file mode 100644 index 0000000..e406bd9 --- /dev/null +++ b/tests.orig2/test_custom_events_list.py @@ -0,0 +1,15 @@ +from common import * + +def test(capsys): + cli(["-q", "custom-events", "delete-all", "-t", "transaction-store", "-y", 
"VALIDATE_SERVICE"]) + cli(["-q", "custom-events", "create", "-t", "transaction-store", "-f", "data/run-time/custom-events.json"]) + capsys.readouterr() + + response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store"]) + assert response['events'][0]['type'] == "VALIDATE_SERVICE" + + response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) + assert response['events'][0]['type'] == "VALIDATE_SERVICE" + + response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE", "-i", "2023-10-10T13:27:51.226"]) + assert response['events'][0]['type'] == "VALIDATE_SERVICE" diff --git a/tests.orig2/test_custom_events_uuid.py b/tests.orig2/test_custom_events_uuid.py new file mode 100644 index 0000000..507c1d5 --- /dev/null +++ b/tests.orig2/test_custom_events_uuid.py @@ -0,0 +1,26 @@ +from common import * + +def test(capsys): + response = cli_command(capsys, ["custom-events", "create", "-t", "warehousing", "-f", "data/run-time/custom-events-configure.json"]) + uuid = response['uuid'] + + cli_command(capsys, ["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) + assert response['type'] == "CONFIG_SERVICE" + + cli(["-q", "custom-events", "update-by-uuid", "-t", "warehousing", "-u", uuid, "-f", "data/run-time/custom-events.json"]) + capsys.readouterr() + + response = cli_command(capsys, ["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) + assert response['type'] == "VALIDATE_SERVICE" + + cli(["-q", "custom-events", "delete-by-uuid", "-t", "warehousing", "-u", uuid]) + + # Custom event was deleted, so verify it cannot be retrieved. 
+ with pytest.raises(SystemExit) as excinfo: + cli(["-q", "custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) + out, err = capsys.readouterr() + + assert out == "Bad Request" + assert excinfo.value.code == 144 + + cli(["-q", "custom-events", "delete-all", "-t", "warehousing"]) diff --git a/tests/test_docs.py b/tests.orig2/test_docs.py similarity index 100% rename from tests/test_docs.py rename to tests.orig2/test_docs.py diff --git a/tests/test_entity_types.py b/tests.orig2/test_entity_types.py similarity index 100% rename from tests/test_entity_types.py rename to tests.orig2/test_entity_types.py diff --git a/tests/test_github.py b/tests.orig2/test_github.py similarity index 100% rename from tests/test_github.py rename to tests.orig2/test_github.py diff --git a/tests/test_groups.py b/tests.orig2/test_groups.py similarity index 100% rename from tests/test_groups.py rename to tests.orig2/test_groups.py diff --git a/tests/test_ip_allowlist.py b/tests.orig2/test_ip_allowlist.py similarity index 100% rename from tests/test_ip_allowlist.py rename to tests.orig2/test_ip_allowlist.py diff --git a/tests/test_packages.py b/tests.orig2/test_packages.py similarity index 100% rename from tests/test_packages.py rename to tests.orig2/test_packages.py diff --git a/tests/test_packages_java.py b/tests.orig2/test_packages_java.py similarity index 100% rename from tests/test_packages_java.py rename to tests.orig2/test_packages_java.py diff --git a/tests/test_plugins.py b/tests.orig2/test_plugins.py similarity index 100% rename from tests/test_plugins.py rename to tests.orig2/test_plugins.py diff --git a/tests/test_plugins_invalid.py b/tests.orig2/test_plugins_invalid.py similarity index 100% rename from tests/test_plugins_invalid.py rename to tests.orig2/test_plugins_invalid.py diff --git a/tests/test_scorecards.py b/tests.orig2/test_scorecards.py similarity index 100% rename from tests/test_scorecards.py rename to tests.orig2/test_scorecards.py diff --git 
a/tests/test_scorecards_drafts.py b/tests.orig2/test_scorecards_drafts.py similarity index 100% rename from tests/test_scorecards_drafts.py rename to tests.orig2/test_scorecards_drafts.py diff --git a/tests/test_teams.py b/tests.orig2/test_teams.py similarity index 100% rename from tests/test_teams.py rename to tests.orig2/test_teams.py diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py index ca3f029..cc978e8 100644 --- a/tests/helpers/utils.py +++ b/tests/helpers/utils.py @@ -2,11 +2,18 @@ from datetime import datetime from datetime import timedelta from datetime import timezone +from enum import Enum import json +import pytest from typer.testing import CliRunner runner = CliRunner() +class ReturnType(str, Enum): + JSON = "JSON" + RAW = "RAW" + STDOUT = "STDOUT" + def today(): return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S") @@ -15,10 +22,21 @@ def yesterday(): yesterday = today - timedelta(days = 1) return yesterday.strftime("%Y-%m-%dT%H:%M:%S") -def json_response(params): - print("params = " + str(params)) - response = runner.invoke(app, params) - return json.loads(response.stdout) +def cli(params, return_type=ReturnType.JSON): + if not isinstance(return_type, ReturnType): + raise TypeError('return_type must be an instance of ReturnType Enum') + + result = runner.invoke(app, params) -def cli(params): - runner.invoke(app, params) + match return_type: + case ReturnType.JSON: + if result.stdout == "": + return json.loads('{}') + else: + return json.loads(result.stdout) + case ReturnType.RAW: + return result + case ReturnType.STDOUT: + return result.stdout + case ReturnType.STDERR: + return result.stderr diff --git a/tests/test_audit_logs.py b/tests/test_audit_logs.py index 7d86a75..5401b99 100644 --- a/tests/test_audit_logs.py +++ b/tests/test_audit_logs.py @@ -1,5 +1,5 @@ from tests.helpers.utils import * def test(): - response = json_response(["audit-logs", "get"]) - assert (len(response['logs']) > 0) + result = cli(["audit-logs", "get"]) 
+ assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_dates.py b/tests/test_audit_logs_dates.py index 90c4eca..b9d40f7 100644 --- a/tests/test_audit_logs_dates.py +++ b/tests/test_audit_logs_dates.py @@ -3,5 +3,5 @@ def test(): end_date = today() start_date = yesterday() - response = json_response(["audit-logs", "get", "-s", start_date, "-e", end_date]) - assert (len(response['logs']) > 0) + result = cli(["audit-logs", "get", "-s", start_date, "-e", end_date]) + assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_end_date.py b/tests/test_audit_logs_end_date.py index c5ab52d..2667e0e 100644 --- a/tests/test_audit_logs_end_date.py +++ b/tests/test_audit_logs_end_date.py @@ -2,5 +2,5 @@ def test(): end_date = today() - response = json_response(["audit-logs", "get", "-e", end_date]) - assert (len(response['logs']) > 0) + result = cli(["audit-logs", "get", "-e", end_date]) + assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_page.py b/tests/test_audit_logs_page.py index c47e476..2e17e80 100644 --- a/tests/test_audit_logs_page.py +++ b/tests/test_audit_logs_page.py @@ -1,5 +1,5 @@ from tests.helpers.utils import * def test(): - response = json_response(["audit-logs", "get", "-p", "0"]) - assert (len(response['logs']) > 0) + result = cli(["audit-logs", "get", "-p", "0"]) + assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_size.py b/tests/test_audit_logs_size.py index 6d975da..a98056b 100644 --- a/tests/test_audit_logs_size.py +++ b/tests/test_audit_logs_size.py @@ -1,5 +1,5 @@ from tests.helpers.utils import * def test(): - response = json_response(["audit-logs", "get", "-p", "0", "-z", "1"]) - assert (len(response['logs']) == 1) + result = cli(["audit-logs", "get", "-p", "0", "-z", "1"]) + assert (len(result['logs']) == 1) diff --git a/tests/test_audit_logs_start_date.py b/tests/test_audit_logs_start_date.py index 0b73cf2..bd3884b 100644 --- a/tests/test_audit_logs_start_date.py +++ 
b/tests/test_audit_logs_start_date.py @@ -2,5 +2,5 @@ def test(): start_date = yesterday() - response = json_response(["audit-logs", "get", "-s", start_date]) - assert (len(response['logs']) > 0) + result = cli(["audit-logs", "get", "-s", start_date]) + assert (len(result['logs']) > 0) diff --git a/tests/test_custom_data_create_or_update_in_bulk.py b/tests/test_custom_data_create_or_update_in_bulk.py index efeb8cb..ff89a87 100644 --- a/tests/test_custom_data_create_or_update_in_bulk.py +++ b/tests/test_custom_data_create_or_update_in_bulk.py @@ -3,10 +3,10 @@ def test(): cli(["custom-data", "bulk", "-f", "data/run-time/custom-data-bulk.json"]) - response = json_response(["catalog", "details", "-t", "backend-worker"]) - list = [metadata for metadata in response['metadata'] if metadata['key'] == "bulk-key-1"] + result = cli(["catalog", "details", "-t", "backend-worker"]) + list = [metadata for metadata in result['metadata'] if metadata['key'] == "bulk-key-1"] assert list[0]['value'] == "value-1" - response = json_response( ["catalog", "details", "-t", "ach-payments-nacha"]) - list = [metadata for metadata in response['metadata'] if metadata['key'] == "bulk-key-4"] + result = cli( ["catalog", "details", "-t", "ach-payments-nacha"]) + list = [metadata for metadata in result['metadata'] if metadata['key'] == "bulk-key-4"] assert list[0]['value'] == "value-4" diff --git a/tests/test_custom_data_delete.py b/tests/test_custom_data_delete.py index e338206..07db6ef 100644 --- a/tests/test_custom_data_delete.py +++ b/tests/test_custom_data_delete.py @@ -3,10 +3,10 @@ def test(): cli(["custom-data", "add", "-t", "recommendations", "-f", "data/run-time/custom-data-delete.json"]) - response = json_response(["custom-data", "get", "-t", "recommendations", "-k", "delete-me"]) - assert response['value'] == "yes" + result = cli(["custom-data", "get", "-t", "recommendations", "-k", "delete-me"]) + assert result['value'] == "yes" cli(["custom-data", "delete", "-t", "recommendations", 
"-k", "delete-me"]) - response = json_response(["catalog", "details", "-t", "recommendations"]) - assert not any(metadata['key'] == 'delete-me' for metadata in response['metadata']) + result = cli(["catalog", "details", "-t", "recommendations"]) + assert not any(metadata['key'] == 'delete-me' for metadata in result['metadata']) diff --git a/tests/test_custom_data_list.py b/tests/test_custom_data_list.py index ae15abc..0bfa5c1 100644 --- a/tests/test_custom_data_list.py +++ b/tests/test_custom_data_list.py @@ -1,6 +1,6 @@ from tests.helpers.utils import * def test(): - response = json_response(["catalog", "details", "-t", "backend-worker"]) - list = [metadata for metadata in response['metadata'] if metadata['key'] == "cicd"] + result = cli(["catalog", "details", "-t", "backend-worker"]) + list = [metadata for metadata in result['metadata'] if metadata['key'] == "cicd"] assert list[0]['value'] == "circle-ci" diff --git a/tests/test_custom_events_list.py b/tests/test_custom_events_list.py index e406bd9..7ccbb0a 100644 --- a/tests/test_custom_events_list.py +++ b/tests/test_custom_events_list.py @@ -1,15 +1,14 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - cli(["-q", "custom-events", "delete-all", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) - cli(["-q", "custom-events", "create", "-t", "transaction-store", "-f", "data/run-time/custom-events.json"]) - capsys.readouterr() +def test(): + cli(["custom-events", "delete-all", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) + cli(["custom-events", "create", "-t", "transaction-store", "-f", "data/run-time/custom-events.json"]) - response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store"]) - assert response['events'][0]['type'] == "VALIDATE_SERVICE" + result = cli(["custom-events", "list", "-t", "transaction-store"]) + assert result['events'][0]['type'] == "VALIDATE_SERVICE" - response = cli_command(capsys, ["custom-events", "list", "-t", 
"transaction-store", "-y", "VALIDATE_SERVICE"]) - assert response['events'][0]['type'] == "VALIDATE_SERVICE" + result = cli(["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) + assert result['events'][0]['type'] == "VALIDATE_SERVICE" - response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE", "-i", "2023-10-10T13:27:51.226"]) - assert response['events'][0]['type'] == "VALIDATE_SERVICE" + result = cli(["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE", "-ts", "2023-10-10T13:27:51"]) + assert result['events'][0]['type'] == "VALIDATE_SERVICE" diff --git a/tests/test_custom_events_uuid.py b/tests/test_custom_events_uuid.py index 507c1d5..1a49b23 100644 --- a/tests/test_custom_events_uuid.py +++ b/tests/test_custom_events_uuid.py @@ -1,26 +1,24 @@ -from common import * +from tests.helpers.utils import * -def test(capsys): - response = cli_command(capsys, ["custom-events", "create", "-t", "warehousing", "-f", "data/run-time/custom-events-configure.json"]) - uuid = response['uuid'] +def test(): + result = cli(["custom-events", "create", "-t", "warehousing", "-f", "data/run-time/custom-events-configure.json"]) + uuid = result['uuid'] - cli_command(capsys, ["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) - assert response['type'] == "CONFIG_SERVICE" + result = cli(["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) + assert result['type'] == "CONFIG_SERVICE" - cli(["-q", "custom-events", "update-by-uuid", "-t", "warehousing", "-u", uuid, "-f", "data/run-time/custom-events.json"]) - capsys.readouterr() + cli(["custom-events", "update-by-uuid", "-t", "warehousing", "-u", uuid, "-f", "data/run-time/custom-events.json"]) - response = cli_command(capsys, ["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) - assert response['type'] == "VALIDATE_SERVICE" + result = cli(["custom-events", "get-by-uuid", "-t", "warehousing", "-u", 
uuid]) + assert result['type'] == "VALIDATE_SERVICE" - cli(["-q", "custom-events", "delete-by-uuid", "-t", "warehousing", "-u", uuid]) + cli(["custom-events", "delete-by-uuid", "-t", "warehousing", "-u", uuid]) # Custom event was deleted, so verify it cannot be retrieved. - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) - out, err = capsys.readouterr() + # with pytest.raises(SystemExit) as excinfo: + result = cli(["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid], ReturnType.RAW) + out = result.stdout + assert "HTTP Error 404: Not Found" in out, "An HTTP 404 error code should be thrown" + assert result.exit_code == 1 - assert out == "Bad Request" - assert excinfo.value.code == 144 - - cli(["-q", "custom-events", "delete-all", "-t", "warehousing"]) + cli(["custom-events", "delete-all", "-t", "warehousing"]) diff --git a/tests/test_custom_metrics.py b/tests/test_custom_metrics.py index 840e17b..c7f6203 100644 --- a/tests/test_custom_metrics.py +++ b/tests/test_custom_metrics.py @@ -3,11 +3,11 @@ def test(): cli(["custom-metrics", "delete", "-t", "shipping-integrations", "-k", "vulnerabilities", "-s", "2022-01-01T00:00:00", "-e", today()]) cli(["custom-metrics", "add", "-t", "shipping-integrations", "-k", "vulnerabilities", "-v", "3.0"]) - response = json_response(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) - assert response['data'][0]['value'] == 3.0, "should have single value of 3.0" + result = cli(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) + assert result['data'][0]['value'] == 3.0, "should have single value of 3.0" cli(["custom-metrics", "add-in-bulk", "-t", "shipping-integrations", "-k", "vulnerabilities", "-v", "2024-07-01T00:00:00=1.0", "-v", "2024-08-01T00:00:00=2.0"]) - response = json_response(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) - assert 
response['total'] == 3, "should have total of 3 metrics data points" + result = cli(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) + assert result['total'] == 3, "should have total of 3 metrics data points" print("There is not a good way to test this today because there is a pre-requisite that the custom metric already exists.") print("If you manually create the custom metric named 'vulnerabilities' you can run these tests.") diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py index a475a07..d9baf74 100644 --- a/tests/test_dependencies.py +++ b/tests/test_dependencies.py @@ -8,17 +8,16 @@ def test(): cli(["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) - cli(["dependencies", "add", "-r", callerTag, "-e", - calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies.json"]) + cli(["dependencies", "create", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs"]) cli(["dependencies", "update", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies-update.json"]) - response = json_response(["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) - assert response["callerTag"] == callerTag, "callerTag should be " + callerTag - assert response["calleeTag"] == calleeTag, "calleeTag should be " + calleeTag + result = cli(["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) + assert result["callerTag"] == callerTag, "callerTag should be " + callerTag + assert result["calleeTag"] == calleeTag, "calleeTag should be " + calleeTag cli(["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) - response = json_response(["dependencies", "get-all", "-r", "fraud-analyzer", "-o"]) - assert 
any(dependency['callerTag'] == callerTag and dependency['path'] == "/api/v1/github/configurations" for dependency in response["dependencies"]) + result = cli(["dependencies", "get-all", "-r", "fraud-analyzer", "-o"]) + assert any(dependency['callerTag'] == callerTag and dependency['path'] == "/api/v1/github/configurations" for dependency in result["dependencies"]) cli(["dependencies", "delete", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/audit-logs"]) cli(["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) diff --git a/tests/test_deploys.py b/tests/test_deploys.py index cdaa872..db3136c 100644 --- a/tests/test_deploys.py +++ b/tests/test_deploys.py @@ -5,41 +5,41 @@ def _add_deploy(): def _delete_all(): cli(["deploys", "delete-all"]) - response = json_response(["deploys", "list", "-t", "shipping-integrations"]) - assert len(response['deployments']) == 0, "All deployments for entity should be deleted" + result = cli(["deploys", "list", "-t", "shipping-integrations"]) + assert len(result['deployments']) == 0, "All deployments for entity should be deleted" def test_deploys(): _delete_all() - response = json_response(["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys-uuid.json"]) - uuid = response['uuid'] + result = cli(["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys-uuid.json"]) + uuid = result['uuid'] _add_deploy() - response = json_response(["deploys", "list", "-t", "shipping-integrations"]) - assert any(deploy['uuid'] == uuid for deploy in response['deployments']), "Should find a deploy with uuid" - assert response['total'] == 2, "Two deploys should be returned for entity" + result = cli(["deploys", "list", "-t", "shipping-integrations"]) + assert any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should find a deploy with uuid" + assert result['total'] == 2, "Two deploys should be returned for entity" cli(["deploys", 
"update-by-uuid", "-t", "shipping-integrations", "-uu", uuid, "-f", "data/run-time/deploys-update.json"]) - response = json_response(["deploys", "list", "-t", "shipping-integrations"]) - deploy = [deploy for deploy in response['deployments'] if deploy['uuid'] == uuid] + result = cli(["deploys", "list", "-t", "shipping-integrations"]) + deploy = [deploy for deploy in result['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-456789", "Should find a deploy with sha" cli(["deploys", "delete-by-uuid", "-t", "shipping-integrations", "-uu", uuid]) - response = json_response(["deploys", "list", "-t", "shipping-integrations"]) - assert not any(deploy['uuid'] == uuid for deploy in response['deployments']), "Should not find a deploy with uuid" - assert response['total'] == 1, "Following delete-by-uuid, only one deploy should be returned for entity" + result = cli(["deploys", "list", "-t", "shipping-integrations"]) + assert not any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should not find a deploy with uuid" + assert result['total'] == 1, "Following delete-by-uuid, only one deploy should be returned for entity" _add_deploy() cli(["deploys", "delete", "-t", "shipping-integrations", "-s", "SHA-123456"]) - response = json_response(["deploys", "list", "-t", "shipping-integrations"]) - assert not any(deploy['sha'] == "SHA-123456" for deploy in response['deployments']), "Should not find a deploy with sha that was deleted" + result = cli(["deploys", "list", "-t", "shipping-integrations"]) + assert not any(deploy['sha'] == "SHA-123456" for deploy in result['deployments']), "Should not find a deploy with sha that was deleted" _add_deploy() cli(["deploys", "delete-by-filter", "-ty", "DEPLOY"]) - response = json_response(["deploys", "list", "-t", "shipping-integrations"]) - assert not any(deploy['type'] == "DEPLOY" for deploy in response['deployments']), "Should not find a deploy type 'DEPLOY' that was deleted" + result = cli(["deploys", "list", 
"-t", "shipping-integrations"]) + assert not any(deploy['type'] == "DEPLOY" for deploy in result['deployments']), "Should not find a deploy type 'DEPLOY' that was deleted" - response = json_response(["deploys", "add", + result = cli(["deploys", "add", "-t", "shipping-integrations", "--email", "julien@tpb.com", "--name", "Julien", @@ -50,9 +50,9 @@ def test_deploys(): "--url", "https://tpb.com", "-c", "abc=123", "-c", "def=456"]) - uuid = response['uuid'] - response = json_response(["deploys", "list", "-t", "shipping-integrations"]) - deploy = [deploy for deploy in response['deployments'] if deploy['uuid'] == uuid] + uuid = result['uuid'] + result = cli(["deploys", "list", "-t", "shipping-integrations"]) + deploy = [deploy for deploy in result['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-123456", "Should find a deploy with sha" assert deploy[0]['deployer']['email'] == "julien@tpb.com", "Email should be set for deploy" assert deploy[0]['deployer']['name'] == "Julien", "Name should be set for deploy" From 85560febde64943564800fda0c15c67bfab5d762 Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Sat, 12 Oct 2024 19:24:33 -0400 Subject: [PATCH 18/56] print from context fn to move more output flag validation to utils --- STYLE.md | 55 +++++++++++++++++++++++ cortexapps_cli/commands/catalog.py | 21 ++++----- cortexapps_cli/cortex_client.py | 31 ++----------- cortexapps_cli/utils.py | 72 +++++++++++++++++++++++++----- 4 files changed, 128 insertions(+), 51 deletions(-) create mode 100644 STYLE.md diff --git a/STYLE.md b/STYLE.md new file mode 100644 index 0000000..724b2ed --- /dev/null +++ b/STYLE.md @@ -0,0 +1,55 @@ +# CLI commands style guide (WIP) + +Here are some guidelines on developing commands for Cortex CLI + +## Flags and arguments + +* Prefer flags over arguments, so that command actions are clearer and future additions are less likely to break existing scripts. 
+* Flags should have a long two-dash version and a short single-dash version: `--long-version`, `-l`. +* Try to use the same short version flag everywhere. We want to avoid having a single letter flag that means different things in different commands. +* Flags that are multiple words should be in `kebab-case`. + +## General forms + +Commands should be readable and easy to understand. In general, the parts of a command may be: + +* Executable name e.g., `cortex` +* Global flags that affect the behavior of the executable as a whole, like `--tenant` or `--config` +* Top-level object type or topic e.g., `team`, `catalog` +* Top level verb e.g., `create`, `list`, `add` +* Verb objects, if applicable, e.g., `links`, `description` +* Command-specific arguments and flags, e.g., `--description`, `--file` + +Examples: +``` +# list catalog entries of type 'service' and include ownership info +cortex catalog list --include-owners --types service + +# create a team from a file +cortex teams create --file input.json + +# add a link to a team +cortex teams add link --url https://www.catster.com --type documentation --name Catster +``` + +## Standard verbs + +Recommendations for verbs to use in the CLI, and their meanings: + +* **list** - List out a resource of which there may be many. If the endpoint is paginated, retrieve all pages by default. Optionally provide `--page` and `--page-size` to allow the user to get a single page. This should map to either a get or a fetch in the client. Provide options to the user for table and CSV output. + +* **get** - Get the full details of a specific object. This would usually map to a HTTPS GET. The user would expect to see detailed information about a single object. + +* **create** - Create an object. If your command is not creating an object but rather adding information to an existing object, it should be called **add** rather than create. Create should fail if the object already exists. 
Consider adding `--replace-existing` and `--update-existing` flags if you want to allow this behavior for users. Original create commands required the full definition of the object in JSON or YAML; all new create commands should have this as default behavior as well. Consider adding flags to also allow creation of objects without a full JSON/YAML object definition. + +* **delete** - Delete an object. If the terminal is interactive, prompt the user to make sure they really want to delete. Provide a `--force` flag that skips the prompt, but when the terminal is interactive says what it's going to do and waits ten seconds in case the user changes their mind; instruct the user to hit Ctrl+C to abort. When the terminal is not interactive (when the delete command is happening as part of a script or batch process) delete with `--force` should succeed immediately and delete without `--force` should fail immediately. + +* **update** - Make changes to an existing object. Accept a full object definition in JSON or YAML as appropriate. Ideally, also accept a partial object definition. If the object definition is valid, retrieve the existing object, merge the changes in the provided definition, and apply the update. + +* **archive/unarchive** - In some cases, these operations could be accomplished via **update** but they should be provided as separate verbs as well. + +* **add/remove** - Add items to or remove items from object attributes that are lists. In many cases this could be accomplished by **get/update** above, but in the case of commonly used attributes like *links* they should be provided as separate verbs as well. Unlike **delete**, it's not necessary to prompt or warn the user before executing **remove**. Consider also providing a **list** subcommand to list existing values in the attribute. + +* **set/unset** - Set or unset object attributes that are not lists. 
In many cases this could be accomplished by **get/update** above, but in the case of commonly used attributes like *description* they should be provided as separate verbs as well. Unlike **delete**, it's not necessary to prompt or warn the user before executing **unset**. Consider also providing a **show** command to show the existing value of the attribute. + +* **open** - Open the specified object(s) in the user's browser. Fail immediately if the terminal is not active or a browser is not available. Consider warning the user if this would result in opening more than 3 browser tabs. diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index 430bb94..2d0b1aa 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -2,18 +2,18 @@ from typing import Optional, List from typing_extensions import Annotated -from cortexapps_cli.utils import print_output +from cortexapps_cli.utils import print_output_with_context app = typer.Typer(help="Catalog commands") class ListCommandOptions: table_output = Annotated[ Optional[bool], - typer.Option("--table", help="Output the response as a table", show_default=False) + typer.Option("--table", help="Output the response as a table", show_default=False) # , callback=table_output_cb) ] csv_output = Annotated[ Optional[bool], - typer.Option("--csv", help="Output the response as CSV", show_default=False) + typer.Option("--csv", help="Output the response as CSV", show_default=False) # , callback=csv_output_cb) ] columns = Annotated[ Optional[List[str]], @@ -32,7 +32,6 @@ class ListCommandOptions: typer.Option("--page-size", "-z", help="Page size for results", show_default=False) ] - class CatalogCommandOptions: include_archived = Annotated[ Optional[bool], @@ -102,11 +101,8 @@ def catalog_list( ): client = ctx.obj["client"] - if table_output and csv_output: - raise typer.BadParameter("Only one of --table and --csv can be specified") - - if (table_output or csv_output) and 
not columns: - columns = [ + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ "ID=id", "Tag=tag", "Name=name", @@ -114,8 +110,6 @@ def catalog_list( "Git Repository=git.repository", ] - output_format = "table" if table_output else "csv" if csv_output else "json" - params = { "includeArchived": include_archived, "hierarchyDepth": hierarchy_depth, @@ -147,5 +141,6 @@ def catalog_list( # if page is specified, we want to fetch only that page r = client.get("api/v1/catalog", params=params) - data = r if output_format == 'json' else r.get('entities', []) - print_output(data=data, columns=columns, filters=filters, output_format=output_format) + data = r + # print_output(data=data, columns=columns, filters=filters, output_format=output_format) + print_output_with_context(ctx, data) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index c74ae4b..a5db7d4 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -3,38 +3,13 @@ import typer from rich import print +from cortexapps_cli.utils import guess_data_key + class CortexClient: def __init__(self, api_key, base_url='https://api.getcortexapp.com'): self.api_key = api_key self.base_url = base_url - def guess_data_key(self, response: list | dict): - """ - Guess the key of the data list in a paginated response. - - Args: - response (list or dict): The response to guess the data key from. - - Returns: - The key of the data list in the response. 
- """ - if isinstance(response, list): - # if the response is a list, there is no data key - return '' - if isinstance(response, dict): - # if the response is a dict, it should have exactly one key whose value is a list - data_keys = [k for k, v in response.items() if isinstance(v, list)] - if len(data_keys) == 0: - # if no such key is found, raise an error - raise ValueError(f"Response dict does not contain a list: {response}") - if len(data_keys) > 1: - # if more than one such key is found, raise an error - raise ValueError(f"Response dict contains multiple lists: {response}") - return data_keys[0] - - # if the response is neither a list nor a dict, raise an error - raise ValueError(f"Response is not a list or dict: {response}") - def request(self, method, endpoint, params={}, headers={}, data=None, raw_body=False, raw_response=False, content_type='application/json'): req_headers = { 'Authorization': f'Bearer {self.api_key}', @@ -104,7 +79,7 @@ def fetch(self, endpoint, params={}, headers={}): if data_key is None: # first page, guess the data key - data_key = self.guess_data_key(response) + data_key = guess_data_key(response) # Some endpoints just return an array as the root element. In those cases, data_key is '' if data_key == '': diff --git a/cortexapps_cli/utils.py b/cortexapps_cli/utils.py index e026b26..3858914 100644 --- a/cortexapps_cli/utils.py +++ b/cortexapps_cli/utils.py @@ -4,10 +4,39 @@ import sys import typer +from typing import overload + from rich import print_json from rich.table import Table from rich.console import Console +def guess_data_key(response: list | dict): + """ + Guess the key of the data list in a paginated response. + + Args: + response (list or dict): The response to guess the data key from. + + Returns: + The key of the data list in the response. 
+ """ + if isinstance(response, list): + # if the response is a list, there is no data key + return '' + if isinstance(response, dict): + # if the response is a dict, it should have exactly one key whose value is a list + data_keys = [k for k, v in response.items() if isinstance(v, list)] + if len(data_keys) == 0: + # if no such key is found, raise an error + raise ValueError(f"Response dict does not contain a list: {response}") + if len(data_keys) > 1: + # if more than one such key is found, raise an error + raise ValueError(f"Response dict contains multiple lists: {response}") + return data_keys[0] + + # if the response is neither a list nor a dict, raise an error + raise ValueError(f"Response is not a list or dict: {response}") + def get_value_at_path(data, path): """ Get the value at a specified path in a nested dictionary. @@ -85,10 +114,12 @@ def print_output(data, columns=None, filters=None, output_format='json'): filters: A list of filters to apply to the data. output_format: The format to print the data in. """ - - if not output_format in ['json', 'table', 'csv']: + + if output_format is None: + output_format = 'json' + elif not output_format in ['json', 'table', 'csv']: raise ValueError("Invalid output format. Must be one of: json, table, csv") - + if output_format == 'json': if columns: raise typer.BadParameter("Columns can only be specified when using --table or --csv") @@ -96,10 +127,16 @@ def print_output(data, columns=None, filters=None, output_format='json'): raise typer.BadParameter("Filters can only be specified when using --table or --csv") print_json(data=data) return - + + data_key = guess_data_key(data) + table_data = data.get(data_key) if data_key else data + + if not isinstance(table_data, list): + raise ValueError(f"Data is not a list: {table_data}") + if not columns: raise typer.BadParameter("Columns must be specified when using --table or --csv") - + for idx, column in enumerate(columns): if not re.match(r"^[a-zA-Z0-9_. 
]+=[a-zA-Z0-9_.]+$", column): if re.match(r"^[a-zA-Z0-9_.]+$", column): @@ -107,20 +144,20 @@ def print_output(data, columns=None, filters=None, output_format='json'): columns[idx] = f"{column}={column}" else: raise typer.BadParameter("Columns must be in the format HeaderName=jsonpath") - + if filters: for filter in filters: if not re.match(r"^[a-zA-Z0-9_.]+=.+$", filter): raise typer.BadParameter("Filters must be in the format jsonpath=regex") - + column_headers = [x.split('=')[0] for x in columns] column_accessors = [x.split('=')[1] for x in columns] rows = [] - - for item in data: + + for item in table_data: if matches_filters(item, filters): rows.append([humanize_value(get_value_at_path(item, accessor)) for accessor in column_accessors]) - + if output_format == 'table': table = Table() for header in column_headers: @@ -133,3 +170,18 @@ def print_output(data, columns=None, filters=None, output_format='json'): csv_writer = csv.writer(sys.stdout) csv_writer.writerow(column_headers) csv_writer.writerows(rows) + +def print_output_with_context(ctx: typer.Context, data): + columns = ctx.params.get('columns', None) + filters = ctx.params.get('filters', None) + table_output = ctx.params.get('table_output', None) + csv_output = ctx.params.get('csv_output', None) + if table_output and csv_output: + raise typer.BadParameter("Only one of --table and --csv can be specified") + if table_output: + output_format = 'table' + elif csv_output: + output_format = 'csv' + else: + output_format = 'json' + print_output(data, columns, filters, output_format) From acec2763443822b06af1696b58409b4f47bf5497 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Mon, 14 Oct 2024 10:05:54 -0700 Subject: [PATCH 19/56] Add discovery audit --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/discovery_audit.py | 80 ++++++++++++++++++++++ tests/test_discovery_audit.py | 14 ++++ 3 files changed, 96 insertions(+) create mode 100644 cortexapps_cli/commands/discovery_audit.py create mode 100644 
tests/test_discovery_audit.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 9fa6ce3..e35a45f 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -16,6 +16,7 @@ import cortexapps_cli.commands.custom_metrics as custom_metrics import cortexapps_cli.commands.dependencies as dependencies import cortexapps_cli.commands.deploys as deploys +import cortexapps_cli.commands.discovery_audit as discovery_audit import cortexapps_cli.commands.raw as raw import cortexapps_cli.commands.teams as teams @@ -29,6 +30,7 @@ app.add_typer(custom_metrics.app, name="custom-metrics") app.add_typer(dependencies.app, name="dependencies") app.add_typer(deploys.app, name="deploys") +app.add_typer(discovery_audit.app, name="discovery-audit") app.add_typer(raw.app, name="raw") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/discovery_audit.py b/cortexapps_cli/commands/discovery_audit.py new file mode 100644 index 0000000..6ea7df4 --- /dev/null +++ b/cortexapps_cli/commands/discovery_audit.py @@ -0,0 +1,80 @@ +from collections import defaultdict +from datetime import datetime +from enum import Enum +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +class DiscoveryType(str, Enum): + APM_RESOURCE_NOT_DETECTED = "APM_RESOURCE_NOT_DETECTED" + AWS_RESOURCE_NOT_DETECTED = "AWS_RESOURCE_NOT_DETECTED" + AZURE_RESOURCE_NOT_DETECTED = "AZURE_RESOURCE_NOT_DETECTED" + ECS_RESOURCE_NOT_DETECTED = "ECS_RESOURCE_NOT_DETECTED" + GOOGLE_CLOUD_RESOURCE_NOT_DETECTED = "GOOGLE_CLOUD_RESOURCE_NOT_DETECTED" + NEW_APM_RESOURCE = "NEW_APM_RESOURCE" + NEW_AWS_RESOURCE = "NEW_AWS_RESOURCE" + NEW_AZURE_RESOURCE = "NEW_AZURE_RESOURCE" + NEW_ECS_RESOURCE = "NEW_ECS_RESOURCE" + NEW_GOOGLE_CLOUD_RESOURCE = "NEW_GOOGLE_CLOUD_RESOURCE" + NEW_K8S_RESOURCE = "NEW_K8S_RESOURCE" + NEW_REPOSITORY = "NEW_REPOSITORY" + REPOSITORY_ARCHIVED = "REPOSITORY_ARCHIVED" + REPOSITORY_DELETED = "REPOSITORY_DELETED" + +class 
DiscoverySource(str, Enum): + AWS = "AWS" + AZURE_DEVOPS = "AZURE_DEVOPS" + AZURE_RESOURCES = "AZURE_RESOURCES" + BITBUCKET = "BITBUCKET" + DATADOG = "DATADOG" + DYNATRACE = "DYNATRACE" + ECS = "ECS" + GCP = "GCP" + GITHUB = "GITHUB" + GITLAB = "GITLAB" + INSTANA = "INSTANA" + K8S = "K8S" + LIGHTSTEP = "LIGHTSTEP" + LAMBDA = "LAMBDA" + LAMBDA_CLOUD_CONTROL = "LAMBDA_CLOUD_CONTROL" + NEWRELIC = "NEWRELIC" + SERVICENOW = "SERVICENOW" + SERVICENOW_DOMAIN = "SERVICENOW_DOMAIN" + +app = typer.Typer(help="Discovery Audit commands") + +@app.command() +def get( + ctx: typer.Context, + include_ignored: bool = typer.Option(False, "--include-ignored", "-ii", help="Include ignore events in result"), + type: DiscoveryType = typer.Option(None, "--type", "-ty", help="The type of audit event"), + source: DiscoverySource = typer.Option(None, "--source", "-s", help="The source of the audit event"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + This report shows you recent changes in your environment that aren't reflected in Cortex, including newly created repositories, services, and resources that we discover from your integrations or which were deleted in the environment but corresponding Cortex entities are still present. 
+ """ + + client = ctx.obj["client"] + + params = { + "includeIgnored": include_ignored, + "page": page, + "pageSize": page_size, + "source": source, + "type": type + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/discovery-audit", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/discovery-audit", params=params) + + print_json(data=r) diff --git a/tests/test_discovery_audit.py b/tests/test_discovery_audit.py new file mode 100644 index 0000000..abef9db --- /dev/null +++ b/tests/test_discovery_audit.py @@ -0,0 +1,14 @@ +from tests.helpers.utils import * + +def test_discovery_audit_get(): + result = cli(["discovery-audit", "get"]) + +def test_discovery_audit_get_include_ignored(): + result = cli(["discovery-audit", "get", "-ii"]) + +def test_discovery_audit_filter_on_source(): + result = cli(["discovery-audit", "get", "-s", "GITHUB"]) + +def test_discovery_audit_filter_on_type(): + result = cli(["discovery-audit", "get", "-ty", "NEW_REPOSITORY"]) + From c1a756ec17a2af6c0181235c9cdc36372bb9d4f9 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Mon, 14 Oct 2024 10:32:10 -0700 Subject: [PATCH 20/56] Separate discovery audit tests --- tests/test_discovery_audit.py | 14 -------------- 1 file changed, 14 deletions(-) delete mode 100644 tests/test_discovery_audit.py diff --git a/tests/test_discovery_audit.py b/tests/test_discovery_audit.py deleted file mode 100644 index abef9db..0000000 --- a/tests/test_discovery_audit.py +++ /dev/null @@ -1,14 +0,0 @@ -from tests.helpers.utils import * - -def test_discovery_audit_get(): - result = cli(["discovery-audit", "get"]) - -def test_discovery_audit_get_include_ignored(): - result = cli(["discovery-audit", "get", "-ii"]) - -def test_discovery_audit_filter_on_source(): - result = cli(["discovery-audit", "get", "-s", 
"GITHUB"]) - -def test_discovery_audit_filter_on_type(): - result = cli(["discovery-audit", "get", "-ty", "NEW_REPOSITORY"]) - From de6d2414a73bf0859737de656fc94edaa298c3f4 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Mon, 14 Oct 2024 10:34:29 -0700 Subject: [PATCH 21/56] Break out discovery audit tests --- cortexapps_cli/commands/catalog.py | 41 +++++++++++++++++++ .../test_discovery_audit_filter_on_source.py | 5 +++ tests/test_discovery_audit_filter_on_type.py | 5 +++ tests/test_discovery_audit_get.py | 4 ++ ...est_discovery_audit_get_include_ignored.py | 4 ++ 5 files changed, 59 insertions(+) create mode 100644 tests/test_discovery_audit_filter_on_source.py create mode 100644 tests/test_discovery_audit_filter_on_type.py create mode 100644 tests/test_discovery_audit_get.py create mode 100644 tests/test_discovery_audit_get_include_ignored.py diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index 2d0b1aa..151e986 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -144,3 +144,44 @@ def catalog_list( data = r # print_output(data=data, columns=columns, filters=filters, output_format=output_format) print_output_with_context(ctx, data) + +@app.command() +def details( + ctx: typer.Context, + hierarchy_depth: CatalogCommandOptions.hierarchy_depth = 'full', + include_hierarchy_fields: CatalogCommandOptions.include_hierarchy_fields = None, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filter = [], +): + client = ctx.obj["client"] + + if table_output and csv_output: + raise typer.BadParameter("Only one of --table and --csv can be specified") + + if (table_output or csv_output) and not columns: + columns = [ + "ID=id", + 
"Tag=tag", + "Name=name", + "Type=type", + "Git Repository=git.repository", + ] + + output_format = "table" if table_output else "csv" if csv_output else "json" + + params = { + "hierarchyDepth": hierarchy_depth, + "includeHierarchyFields": include_hierarchy_fields + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("api/v1/catalog/" + tag, params=params) + + data = r if output_format == 'json' else [r] + #print_output(data=data, columns=columns, filters=filters, output_format=output_format) + print_output_with_context(ctx, data) diff --git a/tests/test_discovery_audit_filter_on_source.py b/tests/test_discovery_audit_filter_on_source.py new file mode 100644 index 0000000..4ac4d5f --- /dev/null +++ b/tests/test_discovery_audit_filter_on_source.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + result = cli(["discovery-audit", "get", "-s", "GITHUB"]) + diff --git a/tests/test_discovery_audit_filter_on_type.py b/tests/test_discovery_audit_filter_on_type.py new file mode 100644 index 0000000..b2e5199 --- /dev/null +++ b/tests/test_discovery_audit_filter_on_type.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + result = cli(["discovery-audit", "get", "-ty", "NEW_REPOSITORY"]) + diff --git a/tests/test_discovery_audit_get.py b/tests/test_discovery_audit_get.py new file mode 100644 index 0000000..ed3f18a --- /dev/null +++ b/tests/test_discovery_audit_get.py @@ -0,0 +1,4 @@ +from tests.helpers.utils import * + +def test(): + result = cli(["discovery-audit", "get"]) diff --git a/tests/test_discovery_audit_get_include_ignored.py b/tests/test_discovery_audit_get_include_ignored.py new file mode 100644 index 0000000..d06b72b --- /dev/null +++ b/tests/test_discovery_audit_get_include_ignored.py @@ -0,0 +1,4 @@ +from tests.helpers.utils import * + +def test(): + result = cli(["discovery-audit", "get", "-ii"]) From 685991d58c54bfbddd0032de32f5966d89a3ac0d Mon Sep 17 
00:00:00 2001 From: Jeff Schnitter Date: Mon, 14 Oct 2024 11:40:52 -0700 Subject: [PATCH 22/56] Add docs API commands --- cortexapps_cli/cli.py | 2 ++ cortexapps_cli/commands/docs.py | 63 +++++++++++++++++++++++++++++++++ data/run-time/docs.yaml | 2 +- tests/test_docs.py | 15 ++++++++ 4 files changed, 81 insertions(+), 1 deletion(-) create mode 100644 cortexapps_cli/commands/docs.py create mode 100644 tests/test_docs.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index e35a45f..c5e9c34 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -17,6 +17,7 @@ import cortexapps_cli.commands.dependencies as dependencies import cortexapps_cli.commands.deploys as deploys import cortexapps_cli.commands.discovery_audit as discovery_audit +import cortexapps_cli.commands.docs as docs import cortexapps_cli.commands.raw as raw import cortexapps_cli.commands.teams as teams @@ -31,6 +32,7 @@ app.add_typer(dependencies.app, name="dependencies") app.add_typer(deploys.app, name="deploys") app.add_typer(discovery_audit.app, name="discovery-audit") +app.add_typer(docs.app, name="docs") app.add_typer(raw.app, name="raw") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/docs.py b/cortexapps_cli/commands/docs.py new file mode 100644 index 0000000..a2f32c7 --- /dev/null +++ b/cortexapps_cli/commands/docs.py @@ -0,0 +1,63 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated +import yaml + +app = typer.Typer(help="Docs commands") + +@app.command() +def get( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(None, "--name", "-n", help="Name of the OpenAPI spec to return. If you have multiple OpenAPI specs configured for your entity as x-cortex-links, use this parameter to ensure the correct spec is returned. 
If this parameter is not specified, we will return the first OpenAPI spec found."), +): + """ + Get OpenAPI docs for entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("api/v1/catalog/" + tag + "/documentation/openapi", params=params) + + print_json(data=r) + +@app.command() +def update( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing stringified JSON representation of the OpenAPI spec; can be passed as stdin with -, example: -f-")] = None, +): + """ + Update OpenAPI docs for entity + """ + + client = ctx.obj["client"] + + yaml_content = yaml.safe_load("".join([line for line in file_input])) + + data = json.dumps({"spec": "" + str(yaml_content) + ""}) + + r = client.put("api/v1/catalog/" + tag + "/documentation/openapi", data=data) + + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Delete OpenAPI docs for entity + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/catalog/" + tag + "/documentation/openapi") diff --git a/data/run-time/docs.yaml b/data/run-time/docs.yaml index 2a4a2c9..053daca 100644 --- a/data/run-time/docs.yaml +++ b/data/run-time/docs.yaml @@ -6,7 +6,7 @@ paths: /: get: operationId: listVersionsv2 - summary: List API versions + summary: List API versions with 'full' history responses: "200": description: 200 response diff --git a/tests/test_docs.py b/tests/test_docs.py new file mode 100644 index 0000000..48a1014 --- /dev/null +++ b/tests/test_docs.py @@ -0,0 +1,15 @@ +from tests.helpers.utils import * + +def test_docs(): + 
cli(["docs", "update", "-t", "cli-test-service", "-f", "data/run-time/docs.yaml"]) + + response = cli(["docs", "get", "-t", "cli-test-service"]) + spec = json.loads(response['spec']) + assert spec['info']['title'] == "Simple API overview", "Returned spec should have a title named 'Simple API overview'" + + cli(["docs", "delete", "-t", "cli-test-service"]) + + result = cli(["docs", "get", "-t", "cli-test-service"], ReturnType.RAW) + out = result.stdout + assert "HTTP Error 404: Not Found" in out, "An HTTP 404 error code should be thrown" + assert result.exit_code == 1 From c680e4cd3d6259b869cb1e0c8fd161bf807df15f Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Tue, 15 Oct 2024 09:29:00 -0400 Subject: [PATCH 23/56] rename raw to rest --- cortexapps_cli/cli.py | 4 ++-- cortexapps_cli/commands/catalog.py | 6 ++++++ cortexapps_cli/commands/{raw.py => rest.py} | 2 +- 3 files changed, 9 insertions(+), 3 deletions(-) rename cortexapps_cli/commands/{raw.py => rest.py} (99%) diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index c5e9c34..508a690 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -18,7 +18,7 @@ import cortexapps_cli.commands.deploys as deploys import cortexapps_cli.commands.discovery_audit as discovery_audit import cortexapps_cli.commands.docs as docs -import cortexapps_cli.commands.raw as raw +import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]}) @@ -33,7 +33,7 @@ app.add_typer(deploys.app, name="deploys") app.add_typer(discovery_audit.app, name="discovery-audit") app.add_typer(docs.app, name="docs") -app.add_typer(raw.app, name="raw") +app.add_typer(rest.app, name="rest") app.add_typer(teams.app, name="teams") # global options diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index 151e986..4038231 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py 
@@ -99,6 +99,9 @@ def catalog_list( columns: ListCommandOptions.columns = [], filters: ListCommandOptions.filter = [], ): + """ + List entities in the catalog + """ client = ctx.obj["client"] if (table_output or csv_output) and not ctx.params.get('columns'): @@ -156,6 +159,9 @@ def details( columns: ListCommandOptions.columns = [], filters: ListCommandOptions.filter = [], ): + """ + Get details for a specific entity in the catalog + """ client = ctx.obj["client"] if table_output and csv_output: diff --git a/cortexapps_cli/commands/raw.py b/cortexapps_cli/commands/rest.py similarity index 99% rename from cortexapps_cli/commands/raw.py rename to cortexapps_cli/commands/rest.py index a39c9c0..54cfb78 100644 --- a/cortexapps_cli/commands/raw.py +++ b/cortexapps_cli/commands/rest.py @@ -7,7 +7,7 @@ from typing_extensions import Annotated from rich import print_json -app = typer.Typer() +app = typer.Typer(help="REST API commands") def parse_multi_value_option(option: List[str] | None) -> dict: if option is None: From fd991aadb9f031340a9ce2a31d41f35c644d17a7 Mon Sep 17 00:00:00 2001 From: Martin Stone Date: Tue, 15 Oct 2024 12:24:23 -0400 Subject: [PATCH 24/56] add table sorting, default to show usage when no command given --- cortexapps_cli/cli.py | 5 +++- cortexapps_cli/command_options.py | 33 +++++++++++++++++++++++ cortexapps_cli/commands/catalog.py | 42 ++++++------------------------ cortexapps_cli/commands/teams.py | 20 +++++++++++++- cortexapps_cli/utils.py | 16 ++++++++++-- 5 files changed, 78 insertions(+), 38 deletions(-) create mode 100644 cortexapps_cli/command_options.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 508a690..f4932f4 100644 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -21,7 +21,10 @@ import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams -app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]}) +app = typer.Typer( + no_args_is_help=True, + 
context_settings={"help_option_names": ["-h", "--help"]} +) # add subcommands app.add_typer(audit_logs.app, name="audit-logs") diff --git a/cortexapps_cli/command_options.py b/cortexapps_cli/command_options.py new file mode 100644 index 0000000..5186e73 --- /dev/null +++ b/cortexapps_cli/command_options.py @@ -0,0 +1,33 @@ +import typer +from typing import List, Optional +from typing_extensions import Annotated + +class ListCommandOptions: + table_output = Annotated[ + Optional[bool], + typer.Option("--table", help="Output the response as a table", show_default=False) # , callback=table_output_cb) + ] + csv_output = Annotated[ + Optional[bool], + typer.Option("--csv", help="Output the response as CSV", show_default=False) # , callback=csv_output_cb) + ] + columns = Annotated[ + Optional[List[str]], + typer.Option("--columns", "-C", help="Columns to include in the table, in the format HeaderName=jsonpath", show_default=False) + ] + filters = Annotated[ + Optional[List[str]], + typer.Option("--filter", "-F", help="Filters to apply on rows, in the format jsonpath=regex", show_default=False) + ] + sort = Annotated[ + Optional[List[str]], + typer.Option("--sort", "-S", help="Sort order to apply on rows, in the format jsonpath:asc or jsonpath:desc", show_default=False) + ] + page = Annotated[ + Optional[int], + typer.Option("--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages", show_default=False) + ] + page_size = Annotated[ + Optional[int], + typer.Option("--page-size", "-z", help="Page size for results", show_default=False) + ] diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index 4038231..a58b36c 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -2,36 +2,11 @@ from typing import Optional, List from typing_extensions import Annotated +from cortexapps_cli.command_options import ListCommandOptions from cortexapps_cli.utils import print_output_with_context app = 
typer.Typer(help="Catalog commands") -class ListCommandOptions: - table_output = Annotated[ - Optional[bool], - typer.Option("--table", help="Output the response as a table", show_default=False) # , callback=table_output_cb) - ] - csv_output = Annotated[ - Optional[bool], - typer.Option("--csv", help="Output the response as CSV", show_default=False) # , callback=csv_output_cb) - ] - columns = Annotated[ - Optional[List[str]], - typer.Option("--columns", "-C", help="Columns to include in the table, in the format HeaderName=jsonpath", show_default=False) - ] - filter = Annotated[ - Optional[List[str]], - typer.Option("--filter", "-F", help="Filters to apply on rows, in the format jsonpath=regex", show_default=False) - ] - page = Annotated[ - Optional[int], - typer.Option("--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages", show_default=False) - ] - page_size = Annotated[ - Optional[int], - typer.Option("--page-size", "-z", help="Page size for results", show_default=False) - ] - class CatalogCommandOptions: include_archived = Annotated[ Optional[bool], @@ -90,14 +65,15 @@ def catalog_list( include_owners: CatalogCommandOptions.include_owners = False, include_links: CatalogCommandOptions.include_links = False, include_metadata: CatalogCommandOptions.include_metadata = False, - page: ListCommandOptions.page = None, - page_size: ListCommandOptions.page_size = 250, git_repositories: CatalogCommandOptions.git_repositories = None, types: CatalogCommandOptions.types = None, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], - filters: ListCommandOptions.filter = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], ): """ List entities in the catalog @@ -145,7 +121,6 @@ def catalog_list( r = client.get("api/v1/catalog", 
params=params) data = r - # print_output(data=data, columns=columns, filters=filters, output_format=output_format) print_output_with_context(ctx, data) @app.command() @@ -157,7 +132,7 @@ def details( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], - filters: ListCommandOptions.filter = [], + filters: ListCommandOptions.filters = [], ): """ Get details for a specific entity in the catalog @@ -167,8 +142,8 @@ def details( if table_output and csv_output: raise typer.BadParameter("Only one of --table and --csv can be specified") - if (table_output or csv_output) and not columns: - columns = [ + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ "ID=id", "Tag=tag", "Name=name", @@ -189,5 +164,4 @@ def details( r = client.get("api/v1/catalog/" + tag, params=params) data = r if output_format == 'json' else [r] - #print_output(data=data, columns=columns, filters=filters, output_format=output_format) print_output_with_context(ctx, data) diff --git a/cortexapps_cli/commands/teams.py b/cortexapps_cli/commands/teams.py index ce97783..787b0da 100644 --- a/cortexapps_cli/commands/teams.py +++ b/cortexapps_cli/commands/teams.py @@ -6,6 +6,8 @@ from enum import Enum from cortexapps_cli.models.team import Team +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context app = typer.Typer(help="Teams commands") @@ -77,6 +79,13 @@ def create( def list( ctx: typer.Context, include_teams_without_members: bool = typer.Option(False, "--include-teams-without-members", help="Include teams without members"), + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = 
[], + sort: ListCommandOptions.sort = [], ): """ List teams @@ -84,11 +93,20 @@ def list( Provide a team tag to list one team, or list all teams if no tag is provided. """ client = ctx.obj["client"] + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "ID=id", + "Tag=teamTag", + "Name=metadata.name", + "Type=type", + ] + params = { "includeTeamsWithoutMembers": include_teams_without_members, } r = client.get("api/v1/teams", params=params) - print_json(json.dumps(r)) + print_output_with_context(ctx, r) @app.command() def get( diff --git a/cortexapps_cli/utils.py b/cortexapps_cli/utils.py index 3858914..69ee6bd 100644 --- a/cortexapps_cli/utils.py +++ b/cortexapps_cli/utils.py @@ -104,7 +104,7 @@ def humanize_value(value): return json.dumps(value, indent=2) return str(value) -def print_output(data, columns=None, filters=None, output_format='json'): +def print_output(data, columns=None, filters=None, sort=None, output_format='json'): """ Print output in the specified format. @@ -137,6 +137,7 @@ def print_output(data, columns=None, filters=None, output_format='json'): if not columns: raise typer.BadParameter("Columns must be specified when using --table or --csv") + columns = list(columns) for idx, column in enumerate(columns): if not re.match(r"^[a-zA-Z0-9_. 
]+=[a-zA-Z0-9_.]+$", column): if re.match(r"^[a-zA-Z0-9_.]+$", column): @@ -154,6 +155,16 @@ def print_output(data, columns=None, filters=None, output_format='json'): column_accessors = [x.split('=')[1] for x in columns] rows = [] + if sort: + for sort_item in sort: + if not re.match(r"^[a-zA-Z0-9_.]+:(asc|ASC|desc|DESC)$", sort_item): + raise typer.BadParameter("Sort must be in the format jsonpath:asc or jsonpath:desc") + (jsonpath, order) = sort_item.split(':') + if order.lower() == 'asc': + table_data = sorted(table_data, key=lambda x: get_value_at_path(x, jsonpath)) + elif order.lower() == 'desc': + table_data = sorted(table_data, key=lambda x: get_value_at_path(x, jsonpath), reverse=True) + for item in table_data: if matches_filters(item, filters): rows.append([humanize_value(get_value_at_path(item, accessor)) for accessor in column_accessors]) @@ -174,6 +185,7 @@ def print_output(data, columns=None, filters=None, output_format='json'): def print_output_with_context(ctx: typer.Context, data): columns = ctx.params.get('columns', None) filters = ctx.params.get('filters', None) + sort = ctx.params.get('sort', None) table_output = ctx.params.get('table_output', None) csv_output = ctx.params.get('csv_output', None) if table_output and csv_output: @@ -184,4 +196,4 @@ def print_output_with_context(ctx: typer.Context, data): output_format = 'csv' else: output_format = 'json' - print_output(data, columns, filters, output_format) + print_output(data, columns=columns, filters=filters, sort=sort, output_format=output_format) From 3567fa9c597ea486c79491844516d9618644a608 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 16 Oct 2024 16:57:07 -0700 Subject: [PATCH 25/56] Use just for build --- .gitignore | 1 + Justfile | 39 +++++++++++++++++++++++++++++++++++++++ data/run-time/3 | 11 ----------- 3 files changed, 40 insertions(+), 11 deletions(-) create mode 100644 Justfile delete mode 100644 data/run-time/3 diff --git a/.gitignore b/.gitignore index b29761b..da0392a 
100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ coverage.json .github/workflows/test.yml .export report.html +.load-data-done diff --git a/Justfile b/Justfile new file mode 100644 index 0000000..23276a0 --- /dev/null +++ b/Justfile @@ -0,0 +1,39 @@ +cortex_cli := 'poetry run cortex2' +cortex_cli_orig := 'poetry run cortex -q' + +help: + @just -l + +# Run all tests +test-all: + poetry run pytest -rA -n auto --cov=cortexapps_cli --cov-append --cov-report term-missing tests + +# Run a single test, ie: just test tests/test_catalog.py +test testname: + poetry run pytest {{testname}} + +# Load data from 'data' directory into Cortex +load-data: + #!/bin/bash + if [[ -f .load-data-done ]] + then + echo "Not loading test data since .load-data-done file exists" + exit + fi + + # Delete existing entity definitions and any entities to prevent getting a conflict error. + # TODO: modify cli import to add a flag to manage this + for resource_file in `ls data/resource-definitions`; do + resource=$(basename ${resource_file} .json) + {{cortex_cli_orig}} catalog delete-by-type -t ${resource} + ({{cortex_cli_orig}} resource-definitions get -t ${resource} && {{cortex_cli_orig}} resource-definitions delete -t ${resource}) || : + {{cortex_cli_orig}} resource-definitions create -f data/resource-definitions/${resource_file} + done + + {{cortex_cli_orig}} backup import -d data + + # Archive a couple of entities in order to test commands that include or exclude archived entities + {{cortex_cli_orig}} catalog archive -t robot-item-sorter + {{cortex_cli_orig}} catalog archive -t inventory-scraper + + @touch .load-data-done diff --git a/data/run-time/3 b/data/run-time/3 deleted file mode 100644 index 36979f5..0000000 --- a/data/run-time/3 +++ /dev/null @@ -1,11 +0,0 @@ -- Stefanos - - scorecards - - used to be a lot faster, ran for hours - - permissions model - - viewer for users -> can request an exemption - - scorecards that target multiple entities, ie service or team - - -- Fred 
and Pradeep - - Lisa had a few questions - ad - - Migration global regional resources -> ran some script From 4bddfb7b58756827c4164783e3fa908d8b938670 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 16 Oct 2024 17:04:05 -0700 Subject: [PATCH 26/56] Partial implementation for entity types --- Justfile | 6 +-- cortexapps_cli/cli.py | 4 +- cortexapps_cli/commands/entity_types.py | 68 +++++++++++++++++++++++++ tests/test_entity_types.py | 17 +++++++ 4 files changed, 91 insertions(+), 4 deletions(-) mode change 100644 => 100755 cortexapps_cli/cli.py create mode 100644 cortexapps_cli/commands/entity_types.py create mode 100644 tests/test_entity_types.py diff --git a/Justfile b/Justfile index 23276a0..27b2f7a 100644 --- a/Justfile +++ b/Justfile @@ -5,7 +5,7 @@ help: @just -l # Run all tests -test-all: +test-all: load-data poetry run pytest -rA -n auto --cov=cortexapps_cli --cov-append --cov-report term-missing tests # Run a single test, ie: just test tests/test_catalog.py @@ -17,7 +17,7 @@ load-data: #!/bin/bash if [[ -f .load-data-done ]] then - echo "Not loading test data since .load-data-done file exists" + echo "Not loading test data since .load-data-done file exists." 
exit fi @@ -36,4 +36,4 @@ load-data: {{cortex_cli_orig}} catalog archive -t robot-item-sorter {{cortex_cli_orig}} catalog archive -t inventory-scraper - @touch .load-data-done + touch .load-data-done diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py old mode 100644 new mode 100755 index f4932f4..0999811 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -18,6 +18,7 @@ import cortexapps_cli.commands.deploys as deploys import cortexapps_cli.commands.discovery_audit as discovery_audit import cortexapps_cli.commands.docs as docs +import cortexapps_cli.commands.entity_types as entity_types import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams @@ -36,6 +37,7 @@ app.add_typer(deploys.app, name="deploys") app.add_typer(discovery_audit.app, name="discovery-audit") app.add_typer(docs.app, name="docs") +app.add_typer(entity_types.app, name="entity-types") app.add_typer(rest.app, name="rest") app.add_typer(teams.app, name="teams") @@ -56,7 +58,7 @@ def global_callback( if not api_key: raise typer.BadParameter("No API key provided and no config file found") create_config = False - + # check if we are in a terminal, if so, ask the user if they want to create a config file if sys.stdin.isatty() and sys.stdout.isatty(): create_config = typer.confirm("No config file found. 
Do you want to create one?") diff --git a/cortexapps_cli/commands/entity_types.py b/cortexapps_cli/commands/entity_types.py new file mode 100644 index 0000000..7586d55 --- /dev/null +++ b/cortexapps_cli/commands/entity_types.py @@ -0,0 +1,68 @@ +from collections import defaultdict +from datetime import datetime +from enum import Enum +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Entity Types commands") + +@app.command() +def list( + ctx: typer.Context, + include_built_in: bool = typer.Option(False, "--include-built-in", "-ib", help="When true, returns the built-in entity types that Cortex provides, such as rds and s3, defaults to false"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + List entity types, excludes Cortex default types of service, domain, and team + """ + + client = ctx.obj["client"] + + params = { + "includeBuiltIn": include_built_in, + "page": page, + "pageSize": page_size, + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/definitions", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/definitions", params=params) + + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + entity_type: str = typer.Option(..., "--type", "-ty", help="The entity type"), +): + """ + Delete entity type + """ + + client = ctx.obj["client"] + + client.delete("api/v1/catalog/definitions/" + entity_type) + +@app.command() +def create( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom entity definition; 
can be passed as stdin with -, example: -f-")] = None, +): + """ + Create entity type + """ + + client = ctx.obj["client"] + data = json.loads("".join([line for line in file_input])) + + r = client.post("api/v1/catalog/definitions/" + entity_type) + print_json(data=r) diff --git a/tests/test_entity_types.py b/tests/test_entity_types.py new file mode 100644 index 0000000..7ac9e20 --- /dev/null +++ b/tests/test_entity_types.py @@ -0,0 +1,17 @@ +from tests.helpers.utils import * + +def test_resource_definitions(capsys): + response = cli(["entity-types", "list"]) + entity_types = response['definitions'] + assert any(definition['type'] == 'api' for definition in entity_types), "Should find entity type named 'api'" + + if any(definition['type'] == 'test-entity-type' for definition in entity_types): + cli(["entity-types", "delete", "-ty", "test-entity-type"]) +# cli(["entity-types", "create", "-f", "tests/test-resource-definition.json"]) +# +# cli(["entity-types", "list"]) +# assert any(definition['type'] == 'test-entity-type' for definition in response['definitions']), "Should find entity type named 'test-entity-type'" +# +# cli(["entity-types", "get", "-t", "test-resource-definition"]) +# +# cli(["entity-types", "update", "-t", "test-resource-definition", "-f", "tests/test-resource-definition-update.json"]) From 6e762f6b9b75dc91d55db59fb73bac9bb56c475b Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 1 Nov 2024 16:34:47 -0700 Subject: [PATCH 27/56] Add GitOps Logs. Could use some additional test coverage. 
--- Justfile | 22 ++++++++++- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/gitops_logs.py | 54 ++++++++++++++++++++++++++ tests/test_gitops_logs.py | 9 +++++ 4 files changed, 85 insertions(+), 2 deletions(-) create mode 100644 cortexapps_cli/commands/gitops_logs.py create mode 100644 tests/test_gitops_logs.py diff --git a/Justfile b/Justfile index 27b2f7a..411332a 100644 --- a/Justfile +++ b/Justfile @@ -4,14 +4,32 @@ cortex_cli_orig := 'poetry run cortex -q' help: @just -l +_check-vars: + #!/bin/bash + if [ -z ${CORTEX_API_KEY+x} ] + then + echo "CORTEX_API_KEY environment variable is not set." + exit 1 + fi + + if [ -z ${CORTEX_BASE_URL+x} ] + then + echo "CORTEX_BASE_URL environment variable is not set." + exit + fi + # Run all tests -test-all: load-data +test-all: _check-vars load-data poetry run pytest -rA -n auto --cov=cortexapps_cli --cov-append --cov-report term-missing tests # Run a single test, ie: just test tests/test_catalog.py -test testname: +test testname: _check-vars poetry run pytest {{testname}} +# Run all tests for an API function, assumes all tests named test_* +test-suite command: _check-vars + poetry run pytest -k test_{{command}} + # Load data from 'data' directory into Cortex load-data: #!/bin/bash diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 0999811..7217c0c 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -19,6 +19,7 @@ import cortexapps_cli.commands.discovery_audit as discovery_audit import cortexapps_cli.commands.docs as docs import cortexapps_cli.commands.entity_types as entity_types +import cortexapps_cli.commands.gitops_logs as gitops_logs import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams @@ -38,6 +39,7 @@ app.add_typer(discovery_audit.app, name="discovery-audit") app.add_typer(docs.app, name="docs") app.add_typer(entity_types.app, name="entity-types") +app.add_typer(gitops_logs.app, name="gitops-logs") app.add_typer(rest.app, name="rest") 
app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/gitops_logs.py b/cortexapps_cli/commands/gitops_logs.py new file mode 100644 index 0000000..916e608 --- /dev/null +++ b/cortexapps_cli/commands/gitops_logs.py @@ -0,0 +1,54 @@ +#from collections import defaultdict +from enum import Enum +import json +from rich import print_json +import typer + +app = typer.Typer(help="GitOps Logs commands") + +class Operation(str, Enum): + ARCHIVED = "ARCHIVED" + CREATED = "CREATED" + NO_CHANGE = "NO_CHANGE" + UPDATED = "UPDATED" + +@app.command() +def get( + ctx: typer.Context, + file: str = typer.Option(None, "--file", "-f", help="File name within the repository"), + file_name: str = typer.Option(None, "--file-name", "-fn", help="File name within the repository; TODO: what is difference with this and file parm?"), + repository: str = typer.Option(None, "--repository", "-r", help="Repository name as defined in your Git provider"), + sha: str = typer.Option(None, "--sha", "-s", help="Commit SHA"), + operation: Operation = typer.Option(None, "--operation", "-o", help="One of CREATED, UPDATED, ARCHIVED, NO_CHANGE"), + error_only: bool = typer.Option(False, "--error-only", "-eo", help="Only include entries with errors"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + Retrieve GitOps logs. API key must have the 'View GitOps logs' permission. 
+ """ + + client = ctx.obj["client"] + + params = { + "errorOnly": error_only, + "file": file, + "fileName": file_name, + "operation": operation, + "page": page, + "pageSize": page_size, + "repository": repository, + "sha": sha + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/gitops-logs", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/gitops-logs", params=params) + + print_json(data=r) diff --git a/tests/test_gitops_logs.py b/tests/test_gitops_logs.py new file mode 100644 index 0000000..d4aab20 --- /dev/null +++ b/tests/test_gitops_logs.py @@ -0,0 +1,9 @@ +from tests.helpers.utils import * + +# This just ensures getting all logs does not fail. Could probably get rid of this test. +def test_gitops_logs_get(): + cli(["gitops-logs", "get"]) + +def test_gitops_logs_page_size(capsys): + response = cli(["gitops-logs", "get", "-p", "1", "-z", "5"]) + assert len(response['logs']) == 5, "Changing page size should return requested amount of entries" From 32607f3deb80a167a6e776960b58452136147500 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Mon, 4 Nov 2024 11:17:44 -0800 Subject: [PATCH 28/56] Add groups --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/gitops_logs.py | 1 - cortexapps_cli/commands/groups.py | 72 ++++++++++++++++++++++++++ tests/test_groups.py | 10 ++++ 4 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 cortexapps_cli/commands/groups.py create mode 100644 tests/test_groups.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 7217c0c..6c72135 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -20,6 +20,7 @@ import cortexapps_cli.commands.docs as docs import cortexapps_cli.commands.entity_types as entity_types import cortexapps_cli.commands.gitops_logs as gitops_logs +import 
cortexapps_cli.commands.groups as groups import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams @@ -40,6 +41,7 @@ app.add_typer(docs.app, name="docs") app.add_typer(entity_types.app, name="entity-types") app.add_typer(gitops_logs.app, name="gitops-logs") +app.add_typer(groups.app, name="groups") app.add_typer(rest.app, name="rest") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/gitops_logs.py b/cortexapps_cli/commands/gitops_logs.py index 916e608..0cb6962 100644 --- a/cortexapps_cli/commands/gitops_logs.py +++ b/cortexapps_cli/commands/gitops_logs.py @@ -1,4 +1,3 @@ -#from collections import defaultdict from enum import Enum import json from rich import print_json diff --git a/cortexapps_cli/commands/groups.py b/cortexapps_cli/commands/groups.py new file mode 100644 index 0000000..fdb0b8f --- /dev/null +++ b/cortexapps_cli/commands/groups.py @@ -0,0 +1,72 @@ +import json +from rich import print_json +import typer + +app = typer.Typer(help="Groups commands") + +@app.command() +def get( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + Get groups for entity. 
+ """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/" + tag_or_id + "/groups", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/" + tag_or_id + "/groups", params=params) + + print_json(data=r) + +@app.command() +def add( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + groups: str = typer.Option(..., "--groups", "-g", help="Comma-delimited list of groups to add to the entity") +): + """ + Add groups to entity. + """ + + client = ctx.obj["client"] + + data = { + "groups": [{"tag": x.strip()} for x in groups.split(',')] + } + + r = client.put("api/v1/catalog/" + tag_or_id + "/groups", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + groups: str = typer.Option(..., "--groups", "-g", help="Comma-delimited list of groups to delete from the entity") +): + """ + Delete groups from entity. 
+ """ + + client = ctx.obj["client"] + + data = { + "groups": [{"tag": x.strip()} for x in groups.split(',')] + } + + r = client.delete("api/v1/catalog/" + tag_or_id + "/groups", data=data) diff --git a/tests/test_groups.py b/tests/test_groups.py new file mode 100644 index 0000000..b8fda33 --- /dev/null +++ b/tests/test_groups.py @@ -0,0 +1,10 @@ +from tests.helpers.utils import * + +def test_groups(): + cli(["groups", "add", "-t", "test-service", "-g", "test-group-2,test-group-3"]) + response = cli(["groups", "get", "-t", "test-service"]) + assert any(group['tag'] == 'test-group-2' for group in response['groups']), "Should find group named test-group-2 in entity test-service" + + cli(["groups", "delete", "-t", "test-service", "-g", "test-group-2,test-group-3"]) + response = cli(["groups", "get", "-t", "test-service"]) + assert not(any(group['tag'] == 'test-group-2' for group in response['groups'])), "After delete, should not find group named test-group-2 in entity test-service" From 9a30031ea43b09259264dea81ac7686721f40c5c Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Mon, 4 Nov 2024 16:55:52 -0800 Subject: [PATCH 29/56] Add IP allowlist --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/ip_allowlist.py | 96 +++++++++++++++++++++++++ tests/test_ip_allowlist.py | 15 ++++ 3 files changed, 113 insertions(+) create mode 100644 cortexapps_cli/commands/ip_allowlist.py create mode 100644 tests/test_ip_allowlist.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 6c72135..d5aaaa7 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -21,6 +21,7 @@ import cortexapps_cli.commands.entity_types as entity_types import cortexapps_cli.commands.gitops_logs as gitops_logs import cortexapps_cli.commands.groups as groups +import cortexapps_cli.commands.ip_allowlist as ip_allowlist import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams @@ -42,6 +43,7 @@ app.add_typer(entity_types.app, name="entity-types") 
app.add_typer(gitops_logs.app, name="gitops-logs") app.add_typer(groups.app, name="groups") +app.add_typer(ip_allowlist.app, name="ip-allowlist") app.add_typer(rest.app, name="rest") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/ip_allowlist.py b/cortexapps_cli/commands/ip_allowlist.py new file mode 100644 index 0000000..e4591ae --- /dev/null +++ b/cortexapps_cli/commands/ip_allowlist.py @@ -0,0 +1,96 @@ +import json +from rich import print_json +import typer + +app = typer.Typer(help="IP Allowlist commands") + +@app.command() +def get( + ctx: typer.Context, + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + Get allowlist of IP addresses & ranges + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/ip-allowlist", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/ip-allowlist", params=params) + + print_json(data=r) + +@app.command() +def replace( + ctx: typer.Context, + addresses: str = typer.Option(..., "--address", "-a", help="Comma-delimited list of IP addresses and/or IP ranges of form ipAddress[:description], for example 127.0.0.1:'my local IP'"), + force: bool = typer.Option(False, "--force", "-o", help="When true, entries will be updated even if the list doesn't contain the requestor's IP address") +): + """ + Replace existing allowlist with provided list of IP addresses & ranges + """ + + client = ctx.obj["client"] + + data = { + "entries": [{"address": x.split(':')[0], "description": None if len(x.split(':')) < 2 else x.split(':')[1]} for x in 
addresses.split(',')] + } + + params = { + "force": force, + } + + r = client.put("api/v1/ip-allowlist", data=data, params=params) + + print_json(data=r) + + +@app.command() +def validate( + ctx: typer.Context, + addresses: str = typer.Option(..., "--address", "-a", help="Comma-delimited list of IP addresses and/or IP ranges of form ipAddress[:description], for example 127.0.0.1:'my local IP'") +): + """ + Validates allowlist of IP addresses & ranges + """ + + client = ctx.obj["client"] + + data = { + "entries": [{"address": x.split(':')[0], "description": None if len(x.split(':')) < 2 else x.split(':')[1]} for x in addresses.split(',')] + } + + r = client.post("api/v1/ip-allowlist/validate", data=data) + + print_json(data=r) + +@app.command() +def remove_all( + ctx: typer.Context, +): + """ + Remove all entries from allowlist + """ + + client = ctx.obj["client"] + + data = { + "entries": [] + } + + r = client.put("api/v1/ip-allowlist", data=data) + + print_json(data=r) diff --git a/tests/test_ip_allowlist.py b/tests/test_ip_allowlist.py new file mode 100644 index 0000000..ea66382 --- /dev/null +++ b/tests/test_ip_allowlist.py @@ -0,0 +1,15 @@ +from tests.helpers.utils import * +import requests + +def test(capsys, tmp_path): + ip_address = requests.get("https://ip.me").text.strip() + ip_param = ip_address + ":My current IP" + cli(["ip-allowlist", "validate", "-a", ip_param]) + cli(["ip-allowlist", "replace", "-a", ip_param]) + + response = cli(["ip-allowlist", "get"]) + assert response['entries'][0]['address'] == ip_address, "Should have a single IP address in allowlist" + + cli(["ip-allowlist", "remove-all"]) + response = cli(["ip-allowlist", "get"]) + assert len(response['entries']) == 0, "Should not have any entries in allowlist" From b718befd67d295e5131d05f35ea29b50c9716cc3 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Tue, 5 Nov 2024 14:34:22 -0800 Subject: [PATCH 30/56] Add oncall. 
No tests for now because we cannot ensure test environment has on-call configured --- cortexapps_cli/cli.py | 2 ++ cortexapps_cli/commands/audit_logs.py | 11 +------- cortexapps_cli/commands/custom_data.py | 16 +++-------- cortexapps_cli/commands/custom_events.py | 14 +++------- cortexapps_cli/commands/custom_metrics.py | 11 +------- cortexapps_cli/commands/dependencies.py | 11 ++------ cortexapps_cli/commands/deploys.py | 13 +++------ cortexapps_cli/commands/entity_types.py | 9 +------ cortexapps_cli/commands/gitops_logs.py | 13 +++------ cortexapps_cli/commands/groups.py | 9 +------ cortexapps_cli/commands/ip_allowlist.py | 11 +------- cortexapps_cli/commands/on_call.py | 33 +++++++++++++++++++++++ cortexapps_cli/cortex_client.py | 11 ++++++++ 13 files changed, 65 insertions(+), 99 deletions(-) create mode 100644 cortexapps_cli/commands/on_call.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index d5aaaa7..0a181f7 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -22,6 +22,7 @@ import cortexapps_cli.commands.gitops_logs as gitops_logs import cortexapps_cli.commands.groups as groups import cortexapps_cli.commands.ip_allowlist as ip_allowlist +import cortexapps_cli.commands.on_call as on_call import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams @@ -44,6 +45,7 @@ app.add_typer(gitops_logs.app, name="gitops-logs") app.add_typer(groups.app, name="groups") app.add_typer(ip_allowlist.app, name="ip-allowlist") +app.add_typer(on_call.app, name="on-call") app.add_typer(rest.app, name="rest") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/audit_logs.py b/cortexapps_cli/commands/audit_logs.py index d5b1ac5..1813af0 100644 --- a/cortexapps_cli/commands/audit_logs.py +++ b/cortexapps_cli/commands/audit_logs.py @@ -2,8 +2,6 @@ from enum import Enum import typer -from rich import print_json - app = typer.Typer(help="Audit log commands") class Action(str, Enum): @@ -70,11 +68,4 @@ def 
get( if str(type(v)) == "": params[k] = ','.join(v) - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/audit-logs", params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/audit-logs", params=params) - - print_json(data=r) + client.fetch_or_get("api/v1/audit-logs", page, params=params) diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py index 3ae6ea9..40a30fc 100644 --- a/cortexapps_cli/commands/custom_data.py +++ b/cortexapps_cli/commands/custom_data.py @@ -58,7 +58,7 @@ def add( data["description"] = description r = client.post("api/v1/catalog/" + tag + "/custom-data", data=data, params=params) - print_json(json.dumps(r)) + print_json(data=r) @app.command() def bulk( @@ -78,7 +78,7 @@ def bulk( } r = client.put("api/v1/catalog/custom-data", data=data, params=params) - print_json(json.dumps(r)) + print_json(data=r) @app.command() def delete( @@ -131,14 +131,4 @@ def list( "pageSize": page_size } - if page is None: - # if page is not specified, we want to fetch all pages - # Not working: https://cortex1.atlassian.net/browse/CET-13655 - #r = client.fetch("api/v1/catalog/" + tag + "/custom-data", params=params) - r = client.get("api/v1/catalog/" + tag + "/custom-data", params=params) - pass - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/catalog/" + tag + "/custom-data", params=params) - - print_json(data=r) + client.fetch_or_get("api/v1/catalog/" + tag + "/custom-data", page, params=params) diff --git a/cortexapps_cli/commands/custom_events.py b/cortexapps_cli/commands/custom_events.py index 7553892..2d37de3 100644 --- a/cortexapps_cli/commands/custom_events.py +++ b/cortexapps_cli/commands/custom_events.py @@ -69,7 +69,7 @@ def update_by_uuid( data[k] = v.strftime('%Y-%m-%dT%H:%M:%S') r = client.put("api/v1/catalog/" + tag + "/custom-events/" + uuid, data=data) - 
print_json(json.dumps(r)) + print_json(data=r) @app.command() def create( @@ -122,7 +122,7 @@ def create( data[k] = v.strftime('%Y-%m-%dT%H:%M:%S') r = client.post("api/v1/catalog/" + tag + "/custom-events", data=data) - print_json(json.dumps(r)) + print_json(data=r) @app.command() def delete_all( @@ -181,14 +181,7 @@ def list( if str(type(v)) == "": params[k] = v.strftime('%Y-%m-%dT%H:%M:%S') - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/catalog/" + tag + "/custom-events", params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/catalog/" + tag + "/custom-events", params=params) - - print_json(data=r) + client.fetch_or_get("api/v1/catalog/" + tag + "/custom-events", page, params=params) @app.command() def get_by_uuid( @@ -202,7 +195,6 @@ def get_by_uuid( client = ctx.obj["client"] r = client.get("api/v1/catalog/" + tag + "/custom-events/" + uuid) - print_json(data=r) @app.command() diff --git a/cortexapps_cli/commands/custom_metrics.py b/cortexapps_cli/commands/custom_metrics.py index 426d12f..9a1a508 100644 --- a/cortexapps_cli/commands/custom_metrics.py +++ b/cortexapps_cli/commands/custom_metrics.py @@ -1,7 +1,5 @@ from collections import defaultdict from datetime import datetime -import json -from rich import print_json import typer from typing_extensions import Annotated @@ -56,14 +54,7 @@ def get( params = _convert_datetime_to_string(params) - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, params=params) - - print_json(json.dumps(r)) + client.fetch_or_get("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, page, params=params) @app.command() 
def add( diff --git a/cortexapps_cli/commands/dependencies.py b/cortexapps_cli/commands/dependencies.py index 62bce2d..ae3625a 100644 --- a/cortexapps_cli/commands/dependencies.py +++ b/cortexapps_cli/commands/dependencies.py @@ -134,14 +134,7 @@ def get_all( client = ctx.obj["client"] - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/catalog/" + caller_tag + "/dependencies", params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/catalog/" + caller_tag + "/dependencies", params=params) - - print_json(data=r) + client.fetch_or_get("api/v1/catalog/" + caller_tag + "/dependencies", page, params=params) @app.command() def delete( @@ -235,4 +228,4 @@ def update( data["description"] = description r = client.put("api/v1/catalog/" + caller_tag + "/dependencies/" + callee_tag, data=data, params=params) - print_json(json.dumps(r)) + print_json(data=r) diff --git a/cortexapps_cli/commands/deploys.py b/cortexapps_cli/commands/deploys.py index 0a0424e..b97e83d 100644 --- a/cortexapps_cli/commands/deploys.py +++ b/cortexapps_cli/commands/deploys.py @@ -121,14 +121,7 @@ def deploys_list( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/catalog/" + tag + "/deploys", params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/catalog/" + tag + "/deploys", params=params) - - print_json(json.dumps(r)) + client.fetch_or_get("api/v1/catalog/" + tag + "/deploys", page, params=params) @app.command() def add( @@ -177,7 +170,7 @@ def add( data["timestamp"] = data["timestamp"].strftime('%Y-%m-%dT%H:%M:%SZ') r = client.post("api/v1/catalog/" + tag + "/deploys", data=data) - print_json(json.dumps(r)) + print_json(data=r) @app.command() def delete_by_uuid( @@ -245,4 +238,4 @@ def update_by_uuid( 
data["timestamp"] = data["timestamp"].strftime('%Y-%m-%dT%H:%M:%SZ') r = client.put("api/v1/catalog/" + tag + "/deploys/" + uuid, data=data) - print_json(json.dumps(r)) + print_json(data=r) diff --git a/cortexapps_cli/commands/entity_types.py b/cortexapps_cli/commands/entity_types.py index 7586d55..432512c 100644 --- a/cortexapps_cli/commands/entity_types.py +++ b/cortexapps_cli/commands/entity_types.py @@ -30,14 +30,7 @@ def list( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/catalog/definitions", params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/catalog/definitions", params=params) - - print_json(data=r) + client.fetch_or_get("api/v1/catalog/definitions", page, params=params) @app.command() def delete( diff --git a/cortexapps_cli/commands/gitops_logs.py b/cortexapps_cli/commands/gitops_logs.py index 0cb6962..e6aa43b 100644 --- a/cortexapps_cli/commands/gitops_logs.py +++ b/cortexapps_cli/commands/gitops_logs.py @@ -1,6 +1,6 @@ from enum import Enum -import json -from rich import print_json +#import json +#from rich import print_json import typer app = typer.Typer(help="GitOps Logs commands") @@ -43,11 +43,4 @@ def get( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/gitops-logs", params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/gitops-logs", params=params) - - print_json(data=r) + client.fetch_or_get("api/v1/gitops-logs", page, params=params) diff --git a/cortexapps_cli/commands/groups.py b/cortexapps_cli/commands/groups.py index fdb0b8f..77fc12b 100644 --- a/cortexapps_cli/commands/groups.py +++ b/cortexapps_cli/commands/groups.py @@ -25,14 +25,7 @@ def 
get( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/catalog/" + tag_or_id + "/groups", params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/catalog/" + tag_or_id + "/groups", params=params) - - print_json(data=r) + client.fetch_or_get("api/v1/catalog/" + tag_or_id + "/groups", page, params=params) @app.command() def add( diff --git a/cortexapps_cli/commands/ip_allowlist.py b/cortexapps_cli/commands/ip_allowlist.py index e4591ae..30c3102 100644 --- a/cortexapps_cli/commands/ip_allowlist.py +++ b/cortexapps_cli/commands/ip_allowlist.py @@ -1,5 +1,3 @@ -import json -from rich import print_json import typer app = typer.Typer(help="IP Allowlist commands") @@ -24,14 +22,7 @@ def get( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/ip-allowlist", params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/ip-allowlist", params=params) - - print_json(data=r) + client.fetch_or_get("api/v1/ip-allowlist", page, params=params) @app.command() def replace( diff --git a/cortexapps_cli/commands/on_call.py b/cortexapps_cli/commands/on_call.py new file mode 100644 index 0000000..3070f05 --- /dev/null +++ b/cortexapps_cli/commands/on_call.py @@ -0,0 +1,33 @@ +import typer +import json +from rich import print_json + +app = typer.Typer(help="On Call commands") + +@app.command() +def get( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") +): + """ + Retrieve current on-call for entity + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag_or_id + 
"/integrations/oncall/current") + print_json(data=r) + +@app.command() +def get_registration( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") +): + """ + Retrieve on-call registration for entity + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag_or_id + "/integrations/oncall/registration") + print_json(data=r) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index a5db7d4..477b6a8 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -2,6 +2,7 @@ import json import typer from rich import print +from rich import print_json from cortexapps_cli.utils import guess_data_key @@ -105,6 +106,16 @@ def fetch(self, endpoint, params={}, headers={}): data_key: data, } + def fetch_or_get(self, endpoint, page, params={}): + if page is None: + # if page is not specified, we want to fetch all pages + r = self.fetch(endpoint, params=params) + else: + # if page is specified, we want to fetch only that page + r = self.get(endpoint, params=params) + + print_json(data=r) + def get_entity(self, entity_tag: str, entity_type: str = ''): match entity_type.lower(): case 'team' | 'teams': From da9f467601098a1e5342fe4800b40a0bd4b90717 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 6 Nov 2024 16:57:03 -0800 Subject: [PATCH 31/56] Add packages --- cortexapps_cli/commands/packages.py | 59 +++++++++++++++++ .../commands/packages_commands/go.py | 37 +++++++++++ .../commands/packages_commands/java.py | 51 +++++++++++++++ .../commands/packages_commands/node.py | 65 +++++++++++++++++++ .../commands/packages_commands/nuget.py | 50 ++++++++++++++ .../commands/packages_commands/python.py | 51 +++++++++++++++ 6 files changed, 313 insertions(+) create mode 100644 cortexapps_cli/commands/packages.py create mode 100644 cortexapps_cli/commands/packages_commands/go.py create mode 100644 
cortexapps_cli/commands/packages_commands/java.py create mode 100644 cortexapps_cli/commands/packages_commands/node.py create mode 100644 cortexapps_cli/commands/packages_commands/nuget.py create mode 100644 cortexapps_cli/commands/packages_commands/python.py diff --git a/cortexapps_cli/commands/packages.py b/cortexapps_cli/commands/packages.py new file mode 100644 index 0000000..4d00164 --- /dev/null +++ b/cortexapps_cli/commands/packages.py @@ -0,0 +1,59 @@ +import json +from rich import print_json +import typer +import cortexapps_cli.commands.packages_commands.go as go +import cortexapps_cli.commands.packages_commands.java as java +import cortexapps_cli.commands.packages_commands.python as python +import cortexapps_cli.commands.packages_commands.node as node +import cortexapps_cli.commands.packages_commands.nuget as nuget + +app = typer.Typer(help="Packages commands") +app.add_typer(go.app, name="go") +app.add_typer(java.app, name="java") +app.add_typer(python.app, name="python") +app.add_typer(node.app, name="node") +app.add_typer(nuget.app, name="nuget") + +@app.command() +def list( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + List packages for entity + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client.fetch_or_get("api/v1/catalog/" + tag_or_id + "/packages", page, params=params) + +@app.command() +def delete_all( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for 
the entity."), +): + """ + Delete all packages for entity + """ + + client = ctx.obj["client"] + + response = client.get("api/v1/catalog/" + tag_or_id + "/packages") + for package in response: + name = package['name'] + package_type = package['packageType'] + if package_type == "NUGET": + package_path = "dotnet/nuget" + else: + package_path = package_type.lower() + client.delete("api/v1/catalog/" + tag_or_id + "/packages/" + package_path, params={"name": name}) diff --git a/cortexapps_cli/commands/packages_commands/go.py b/cortexapps_cli/commands/packages_commands/go.py new file mode 100644 index 0000000..015a001 --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/go.py @@ -0,0 +1,37 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Go commands") + +@app.command() +def upload( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of go.sum; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload go.sum package + """ + + client = ctx.obj["client"] + + #client.post("api/v1/catalog/" + tag_or_id + "/packages/go/gosum", data=package_input.read(), content_type='application/text') + client.post("api/v1/catalog/" + tag_or_id + "/packages/go/gosum", data=package_input.read()) + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", help="The name of the package to delete"), +): + """ + Delete go package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/go", params=params) diff --git 
a/cortexapps_cli/commands/packages_commands/java.py b/cortexapps_cli/commands/packages_commands/java.py new file mode 100644 index 0000000..91beec7 --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/java.py @@ -0,0 +1,51 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Java commands") + +@app.command() +def upload_single( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing package name and version; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload single Java package + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/java", data=package_input.read()) + +@app.command() +def upload_multiple( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing multiple package names and versions; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload multiple Java packages + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/java/bulk", data=package_input.read()) + + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", help="The name of the package to delete"), +): + """ + Delete Java package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/java", params=params) diff --git a/cortexapps_cli/commands/packages_commands/node.py 
b/cortexapps_cli/commands/packages_commands/node.py new file mode 100644 index 0000000..6e592f7 --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/node.py @@ -0,0 +1,65 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Node commands") + +@app.command() +def upload_package_json( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of package.json; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload node package.json file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/node/package-json", data=package_input.read()) + +@app.command() +def upload_package_lock( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of package.lock; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload node package-lock.json file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/node/package-lock", data=package_input.read()) + +@app.command() +def upload_yarn_lock( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of yarn.lock; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload node yarn.lock file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/node/yarn-lock", data=package_input.read()) + + 
+@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", help="The name of the package to delete"), +): + """ + Delete node package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/node", params=params) diff --git a/cortexapps_cli/commands/packages_commands/nuget.py b/cortexapps_cli/commands/packages_commands/nuget.py new file mode 100644 index 0000000..6b745ef --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/nuget.py @@ -0,0 +1,50 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="NuGet commands") + +@app.command() +def upload_csproj( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of NuGet csproj file; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload NuGet csproj file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/dotnet/nuget/csproj", data=package_input.read()) + +@app.command() +def upload_packages_lock( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of NuGet packages.lock; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload NuGet packages.lock file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/dotnet/nuget/packages-lock", data=package_input.read()) + 
+@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", help="The name of the package to delete"), +): + """ + Delete NuGet package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/dotnet/nuget", params=params) diff --git a/cortexapps_cli/commands/packages_commands/python.py b/cortexapps_cli/commands/packages_commands/python.py new file mode 100644 index 0000000..6efac78 --- /dev/null +++ b/cortexapps_cli/commands/packages_commands/python.py @@ -0,0 +1,51 @@ +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Python commands") + +@app.command() +def upload_pipfile( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of pipfile.lock; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload python pipfile.lock file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/python/pipfile", data=package_input.read(), content_type='application/text') + +@app.command() +def upload_requirements( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + package_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of requirements.txt; can be passed as stdin with -, example: -f-")] = None, +): + """ + Upload python requirements.txt file + """ + + client = ctx.obj["client"] + + client.post("api/v1/catalog/" + tag_or_id + "/packages/python/requirements", 
data=package_input.read(), content_type='application/text') + + +@app.command() +def delete( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + name: str = typer.Option(..., "--name", "-n", help="The name of the package to delete"), +): + """ + Delete python package from entity + """ + + client = ctx.obj["client"] + + params = { + "name": name + } + + client.delete("api/v1/catalog/" + tag_or_id + "/packages/python", params=params) From 512ab55523869b1aa16941ccd93ba59583bcdb84 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 6 Nov 2024 16:57:13 -0800 Subject: [PATCH 32/56] Add packages --- cortexapps_cli/cli.py | 2 + cortexapps_cli/cortex_client.py | 3 + tests/test_packages.py | 68 ++++++++++++++++++++ tests/test_packages_go.sum | 2 + tests/test_packages_java_multiple.json | 10 +++ tests/test_packages_java_single.json | 4 ++ tests/test_packages_node_package.json | 22 +++++++ tests/test_packages_node_package_lock.json | 17 +++++ tests/test_packages_node_yarn.lock | 19 ++++++ tests/test_packages_nuget.csproj | 22 +++++++ tests/test_packages_nuget_packages_lock.json | 19 ++++++ tests/test_packages_python_pipfile.lock | 59 +++++++++++++++++ tests/test_packages_python_requirements.txt | 8 +++ 13 files changed, 255 insertions(+) create mode 100644 tests/test_packages.py create mode 100644 tests/test_packages_go.sum create mode 100644 tests/test_packages_java_multiple.json create mode 100644 tests/test_packages_java_single.json create mode 100644 tests/test_packages_node_package.json create mode 100644 tests/test_packages_node_package_lock.json create mode 100644 tests/test_packages_node_yarn.lock create mode 100644 tests/test_packages_nuget.csproj create mode 100644 tests/test_packages_nuget_packages_lock.json create mode 100644 tests/test_packages_python_pipfile.lock create mode 100644 tests/test_packages_python_requirements.txt diff --git 
a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 0a181f7..73fb82a 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -23,6 +23,7 @@ import cortexapps_cli.commands.groups as groups import cortexapps_cli.commands.ip_allowlist as ip_allowlist import cortexapps_cli.commands.on_call as on_call +import cortexapps_cli.commands.packages as packages import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams @@ -46,6 +47,7 @@ app.add_typer(groups.app, name="groups") app.add_typer(ip_allowlist.app, name="ip-allowlist") app.add_typer(on_call.app, name="on-call") +app.add_typer(packages.app, name="packages") app.add_typer(rest.app, name="rest") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index 477b6a8..e9be1db 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -151,3 +151,6 @@ def unarchive_entity(self, entity_tag: str, entity_type: str = ''): path_for_type = 'catalog' return self.put(f'api/v1/{path_for_type}/{entity_tag}/unarchive') + + def read_file(self, file): + return file.read() diff --git a/tests/test_packages.py b/tests/test_packages.py new file mode 100644 index 0000000..4a3b829 --- /dev/null +++ b/tests/test_packages.py @@ -0,0 +1,68 @@ +from tests.helpers.utils import * + +def test_packages(): + cli(["packages", "go", "upload", "-t", "test-service", "-f", "tests/test_packages_go.sum"]) + + cli(["packages", "java", "upload-single", "-t", "test-service", "-f", "tests/test_packages_java_single.json"]) + + cli(["packages", "java", "upload-multiple", "-t", "test-service", "-f", "tests/test_packages_java_multiple.json"]) + + # upload-pipfile will replace any existing PYTHON package entries for an entity. It's assumed you will use either + # pipfile.lock or requirements.txt, but not both. + # So we need to test here because these packages will be overwritten by the upload-requirements command. 
+ cli(["packages", "python", "upload-pipfile", "-t", "test-service", "-f", "tests/test_packages_python_pipfile.lock"]) + response = cli(["packages", "list", "-t", "test-service"]) + assert any(package['name'] == 'certifi' and package['packageType'] == "PYTHON" for package in response), "Should find Python pipfile package" + + cli(["packages", "python", "upload-requirements", "-t", "test-service", "-f", "tests/test_packages_python_requirements.txt"]) + + # Similar store for Node as Python. Only one file type is supported. + cli(["packages", "node", "upload-package-json", "-t", "test-service", "-f", "tests/test_packages_node_package.json"]) + response = cli(["packages", "list", "-t", "test-service"]) + assert any(package['name'] == 'clean-css' and package['packageType'] == "NODE" for package in response), "Should find Node package.json package" + + cli(["packages", "node", "upload-package-lock", "-t", "test-service", "-f", "tests/test_packages_node_package_lock.json"]) + response = cli(["packages", "list", "-t", "test-service"]) + assert any(package['name'] == '@angular/common' and package['packageType'] == "NODE" for package in response), "Should find Node package.lock package" + + cli(["packages", "node", "upload-yarn-lock", "-t", "test-service", "-f", "tests/test_packages_node_yarn.lock"]) + + cli(["packages", "nuget", "upload-packages-lock", "-t", "test-service", "-f", "tests/test_packages_nuget_packages_lock.json"]) + + cli(["packages", "nuget", "upload-csproj", "-t", "test-service", "-f", "tests/test_packages_nuget.csproj"]) + + response = cli(["packages", "list", "-t", "test-service"]) + assert any(package['name'] == 'github.com/cortex.io/catalog' and package['packageType'] == "GO" for package in response), "Should find GO package" + assert any(package['name'] == 'io.cortex.scorecards' and package['packageType'] == "JAVA" for package in response), "Should find single-updated Java package" + assert any(package['name'] == 'io.cortex.teams' and 
package['packageType'] == "JAVA" for package in response), "Should find multiple-update Java package" + assert any(package['name'] == 'cycler' and package['packageType'] == "PYTHON" for package in response), "Should find Python requirement.txt package" + assert any(package['name'] == '@types/babylon' and package['packageType'] == "NODE" for package in response), "Should find Node yarn.lock package" + assert any(package['name'] == 'MicroBuild.Core' and package['packageType'] == "NUGET" for package in response), "Should find NuGet package" + + cli(["packages", "go", "delete", "-t", "test-service", "-n", "github.com/cortex.io/catalog"]) + + cli(["packages", "java", "delete", "-t", "test-service", "-n", "io.cortex.scorecards"]) + cli(["packages", "java", "delete", "-t", "test-service", "-n", "io.cortex.teams"]) + + cli(["packages", "python", "delete", "-t", "test-service", "-n", "cycler"]) + + cli(["packages", "node", "delete", "-t", "test-service", "-n", "@types/babylon"]) + + cli(["packages", "nuget", "delete", "-t", "test-service", "-n", "MicroBuild.Core"]) + + response = cli(["packages", "list", "-t", "test-service"]) + + assert not any(package['name'] == 'github.com/cortex.io/catalog' and package['packageType'] == "GO" for package in response), "Should not find deleted GO package" + + assert not any(package['name'] == 'io.cortex.scorecards' and package['packageType'] == "JAVA" for package in response), "Should not find deleted single-updated Java package" + assert not any(package['name'] == 'io.cortex.teams' and package['packageType'] == "JAVA" for package in response), "Should not find deleted multiple-update Java package" + + assert not any(package['name'] == 'cycler' and package['packageType'] == "PYTHON" for package in response), "Should not find deleted Python requirement.txt package" + + assert not any(package['name'] == '@types/babylon' and package['packageType'] == "NODE" for package in response), "Should not find deleted Node yarn.lock package" + + assert 
not any(package['name'] == 'MicroBuild.Core' and package['packageType'] == "NUGET" for package in response), "Should not find deleted NuGet package" + + cli(["packages", "delete-all", "-t", "test-service"]) + response = cli(["packages", "list", "-t", "test-service"]) + assert len(response) == 0, "Should not find any packages after delete-all" diff --git a/tests/test_packages_go.sum b/tests/test_packages_go.sum new file mode 100644 index 0000000..6fc50b0 --- /dev/null +++ b/tests/test_packages_go.sum @@ -0,0 +1,2 @@ +github.com/gofrs/uuid v3.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/cortex.io/catalog v4.5.6+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= diff --git a/tests/test_packages_java_multiple.json b/tests/test_packages_java_multiple.json new file mode 100644 index 0000000..e2e0c14 --- /dev/null +++ b/tests/test_packages_java_multiple.json @@ -0,0 +1,10 @@ +[ + { + "name": "io.cortex.catalog", + "version": "4.5.6" + }, + { + "name": "io.cortex.teams", + "version": "3.3.3" + } +] diff --git a/tests/test_packages_java_single.json b/tests/test_packages_java_single.json new file mode 100644 index 0000000..d89a92f --- /dev/null +++ b/tests/test_packages_java_single.json @@ -0,0 +1,4 @@ +{ + "name": "io.cortex.scorecards", + "version": "1.2.3" +} diff --git a/tests/test_packages_node_package.json b/tests/test_packages_node_package.json new file mode 100644 index 0000000..31e2e7d --- /dev/null +++ b/tests/test_packages_node_package.json @@ -0,0 +1,22 @@ +{ + "name": "app", + "version": "0.0.0", + "private": true, + "scripts": { + "start": "node ./bin/www" + }, + "dependencies": { + "clean-css": "^4.1.11", + "constantinople": "^3.1.1", + "cookie-parser": "~1.4.4", + "debug": "~2.6.9", + "express": "~4.16.1", + "http-errors": "~1.6.3", + "lorem-ipsum": "^2.0.3", + "md5": "^2.2.1", + "mersenne-twister": "^1.1.0", + "morgan": "~1.9.1", + "pug": "^2.0.4", + "uglify-js": "^2.6.0" + } +} diff --git 
a/tests/test_packages_node_package_lock.json b/tests/test_packages_node_package_lock.json new file mode 100644 index 0000000..b32fe75 --- /dev/null +++ b/tests/test_packages_node_package_lock.json @@ -0,0 +1,17 @@ +{ + "name": "intern-angular", + "version": "1.0.0", + "lockfileVersion": 1, + "dependencies": { + "@angular/animations": { + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.2.6.tgz", + "integrity": "sha1-nZyAoRmwwDaTy9I7uvcosVMf/8c=" + }, + "@angular/common": { + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@angular/common/-/common-4.2.6.tgz", + "integrity": "sha1-IQrOS9JON1+LQbpS/rNLGKiH1do=" + } + } +} diff --git a/tests/test_packages_node_yarn.lock b/tests/test_packages_node_yarn.lock new file mode 100644 index 0000000..980810c --- /dev/null +++ b/tests/test_packages_node_yarn.lock @@ -0,0 +1,19 @@ +"@types/babel-types@*", "@types/babel-types@^7.0.0": + version "7.0.7" + resolved "https://registry.yarnpkg.com/@types/babel-types/-/babel-types-7.0.7.tgz#667eb1640e8039436028055737d2b9986ee336e3" + integrity sha512-dBtBbrc+qTHy1WdfHYjBwRln4+LWqASWakLHsWHR2NWHIFkv4W3O070IGoGLEBrJBvct3r0L1BUPuvURi7kYUQ== + +"@types/babylon@^6.16.2": + version "6.16.5" + resolved "https://registry.yarnpkg.com/@types/babylon/-/babylon-6.16.5.tgz#1c5641db69eb8cdf378edd25b4be7754beeb48b4" + integrity sha512-xH2e58elpj1X4ynnKp9qSnWlsRTIs6n3tgLGNfwAGHwePw0mulHQllV34n0T25uYSu1k0hRKkWXF890B1yS47w== + dependencies: + "@types/babel-types" "*" + +accepts@~1.3.5: + version "1.3.7" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" + integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== + dependencies: + mime-types "~2.1.24" + negotiator "0.6.2" diff --git a/tests/test_packages_nuget.csproj b/tests/test_packages_nuget.csproj new file mode 100644 index 0000000..72b9a92 --- /dev/null +++ 
b/tests/test_packages_nuget.csproj @@ -0,0 +1,22 @@ + + + + + 0.20.0 + + + 0.3.0 + runtime; build; native; contentfiles; analyzers + all + + + 7.1.1 + + + 1.1.1 + + + 4.5.0 + + + diff --git a/tests/test_packages_nuget_packages_lock.json b/tests/test_packages_nuget_packages_lock.json new file mode 100644 index 0000000..3b09240 --- /dev/null +++ b/tests/test_packages_nuget_packages_lock.json @@ -0,0 +1,19 @@ +{ + "version": 1, + "dependencies": { + ".NETCoreApp,Version=v3.1": { + "Microsoft.NETFramework.ReferenceAssemblies": { + "type": "Direct", + "requested": "[1.0.0, )", + "resolved": "1.0.0", + "contentHash": "7D2TMufjGiowmt0E941kVoTIS+GTNzaPopuzM1/1LSaJAdJdBrVP0SkZW7AgDd0a2U1DjsIeaKG1wxGVBNLDMw==" + }, + "Newtonsoft.Json": { + "type": "Direct", + "requested": "[12.0.3, )", + "resolved": "12.0.3", + "contentHash": "6mgjfnRB4jKMlzHSl+VD+oUc1IebOZabkbyWj2RiTgWwYPPuaK1H97G1sHqGwPlS5npiF5Q0OrxN1wni2n5QWg==" + } + } + } +} diff --git a/tests/test_packages_python_pipfile.lock b/tests/test_packages_python_pipfile.lock new file mode 100644 index 0000000..983cd83 --- /dev/null +++ b/tests/test_packages_python_pipfile.lock @@ -0,0 +1,59 @@ +{ + "_meta": { + "hash": { + "sha256": "bb57e0d7853b45999e47c163c46b95bc2fde31c527d8d7b5b5539dc979444a6d" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.7" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "certifi": { + "hashes": [ + "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", + "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" + ], + "index": "pypi", + "version": "==2022.12.7" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + "idna": { + "hashes": [ + 
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", + "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" + ], + "version": "==2.8" + }, + "requests": { + "hashes": [ + "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", + "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" + ], + "index": "pypi", + "version": "==2.21.0" + }, + "urllib3": { + "hashes": [ + "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", + "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_version < '4'", + "version": "==1.24.3" + } + }, + "develop": {} +} diff --git a/tests/test_packages_python_requirements.txt b/tests/test_packages_python_requirements.txt new file mode 100644 index 0000000..1d7bc68 --- /dev/null +++ b/tests/test_packages_python_requirements.txt @@ -0,0 +1,8 @@ +contourpy==1.0.6 + # via matplotlib +cycler==0.11.0 + # via matplotlib +fonttools==4.43.0 + # via matplotlib +kiwisolver==1.4.4 + # via matplotlib From 1bcb3e73f52d83ad3079d86c852a74778e7f550b Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Thu, 7 Nov 2024 16:23:31 -0800 Subject: [PATCH 33/56] Add plugins --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/plugins.py | 83 ++++++++++++++++++++++++++++++ tests/test_plugins.json | 19 +++++++ tests/test_plugins.py | 19 +++++++ tests/test_plugins_update.json | 18 +++++++ 5 files changed, 141 insertions(+) create mode 100644 cortexapps_cli/commands/plugins.py create mode 100644 tests/test_plugins.json create mode 100644 tests/test_plugins.py create mode 100644 tests/test_plugins_update.json diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 73fb82a..da11cd0 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -24,6 +24,7 @@ import cortexapps_cli.commands.ip_allowlist as ip_allowlist import 
cortexapps_cli.commands.on_call as on_call import cortexapps_cli.commands.packages as packages +import cortexapps_cli.commands.plugins as plugins import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams @@ -48,6 +49,7 @@ app.add_typer(ip_allowlist.app, name="ip-allowlist") app.add_typer(on_call.app, name="on-call") app.add_typer(packages.app, name="packages") +app.add_typer(plugins.app, name="plugins") app.add_typer(rest.app, name="rest") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/plugins.py b/cortexapps_cli/commands/plugins.py new file mode 100644 index 0000000..f7cb0c9 --- /dev/null +++ b/cortexapps_cli/commands/plugins.py @@ -0,0 +1,83 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Plugins commands") + +@app.command() +def list( + ctx: typer.Context, + include_drafts: bool = typer.Option(False, "--include-drafts", "-i", help="Also include plugins that are in draft mode"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + Retrieve a list of all plugins, excluding drafts + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client.fetch_or_get("api/v1/plugins", page, params=params) + +@app.command() +def create( + ctx: typer.Context, + plugin_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of plugin using schema defined at https://docs.cortex.io/docs/api/create-plugin")] = None +): + """ + Create a new plugin + """ + + client = ctx.obj["client"] + + client.post("api/v1/plugins", data=plugin_input.read()) + +@app.command() +def delete( + ctx: 
typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") +): + """ + Delete a plugin by tag + """ + + client = ctx.obj["client"] + + client.delete("api/v1/plugins/" + tag_or_id) + +@app.command() +def get( + ctx: typer.Context, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") +): + """ + Retrieve the metadata of a plugin by tag + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/plugins/" + tag_or_id) + print_json(data=r) + +@app.command() +def replace( + ctx: typer.Context, + plugin_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of plugin using schema defined at https://docs.cortex.io/docs/api/create-plugin")] = None, + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") +): + """ + Replace an existing plugin by tag + """ + + client = ctx.obj["client"] + + client.put("api/v1/plugins/"+ tag_or_id, data=plugin_input.read()) diff --git a/tests/test_plugins.json b/tests/test_plugins.json new file mode 100644 index 0000000..cbb98af --- /dev/null +++ b/tests/test_plugins.json @@ -0,0 +1,19 @@ +{ + "blob": "

This is a simple plugin

", + "contexts": [ + { + "type": "GLOBAL" + }, + { + "entityFilter": { + "type": "SERVICE_FILTER" + }, + "type": "ENTITY" + } + ], + "description": "Simple Plugin", + "isDraft": false, + "minimumRoleRequired": "VIEWER", + "name": "My Test Plugin", + "tag": "my-test-plugin" +} diff --git a/tests/test_plugins.py b/tests/test_plugins.py new file mode 100644 index 0000000..affccd7 --- /dev/null +++ b/tests/test_plugins.py @@ -0,0 +1,19 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["plugins", "list"]) + + if any(plugin['tag'] == 'my-test-plugin' for plugin in response['plugins']): + cli(["plugins", "delete", "-t", "my-test-plugin"]) + + cli(["plugins", "create", "-f", "tests/test_plugins.json"]) + response = cli(["plugins", "list"]) + assert any(plugin['tag'] == 'my-test-plugin' for plugin in response['plugins']), "Plugin named my-test-plugin should be in list of plugins" + + cli(["plugins", "replace", "-t", "my-test-plugin", "-f", "tests/test_plugins_update.json"]) + response = cli(["plugins", "get", "-t", "my-test-plugin"]) + assert response['tag'] == "my-test-plugin", "Plugin named my-test-plugin should be returned by get" + + cli(["plugins", "delete", "-t", "my-test-plugin"]) + response = cli(["plugins", "list"]) + assert not(any(plugin['tag'] == 'my-test-plugin' for plugin in response['plugins'])), "Plugin named my-test-plugin should have been deleted" diff --git a/tests/test_plugins_update.json b/tests/test_plugins_update.json new file mode 100644 index 0000000..7f41141 --- /dev/null +++ b/tests/test_plugins_update.json @@ -0,0 +1,18 @@ +{ + "blob": "

This is my updated simple plugin

", + "contexts": [ + { + "type": "GLOBAL" + }, + { + "entityFilter": { + "type": "SERVICE_FILTER" + }, + "type": "ENTITY" + } + ], + "description": "Simple Updated Plugin", + "isDraft": false, + "minimumRoleRequired": "VIEWER", + "name": "My Updated Test Plugin" +} From 47a6405c67589eef6803bdb5a022d1f3327666c1 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 8 Nov 2024 08:37:02 -0800 Subject: [PATCH 34/56] Add queries --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/queries.py | 64 ++++++++++++++++++++++++++++++ tests/test_queries.py | 16 ++++++++ tests/test_queries.txt | 1 + 4 files changed, 83 insertions(+) create mode 100644 cortexapps_cli/commands/queries.py create mode 100644 tests/test_queries.py create mode 100644 tests/test_queries.txt diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index da11cd0..5d35538 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -25,6 +25,7 @@ import cortexapps_cli.commands.on_call as on_call import cortexapps_cli.commands.packages as packages import cortexapps_cli.commands.plugins as plugins +import cortexapps_cli.commands.queries as queries import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.teams as teams @@ -50,6 +51,7 @@ app.add_typer(on_call.app, name="on-call") app.add_typer(packages.app, name="packages") app.add_typer(plugins.app, name="plugins") +app.add_typer(queries.app, name="queries") app.add_typer(rest.app, name="rest") app.add_typer(teams.app, name="teams") diff --git a/cortexapps_cli/commands/queries.py b/cortexapps_cli/commands/queries.py new file mode 100644 index 0000000..72d61f8 --- /dev/null +++ b/cortexapps_cli/commands/queries.py @@ -0,0 +1,64 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Queries commands") + +@app.command() +def run( + ctx: typer.Context, + query_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help=" File containing 
JSON-formatted CQL query; can be passed as stdin with -, example: -f-")] = None, + wait: bool = typer.Option(False, "--wait", "-w", help="Optional; wait for query to complete"), + timeout: int | None = typer.Option(None, "--timeout", "-t", help="Page size for results") +): + """ + Run CQL query + """ + + client = ctx.obj["client"] + + data = {} + data['query'] = query_input.read() + + r = client.post("api/v1/queries", data=data) + + if wait: + job_id = r['jobId'] + sleep_interval = 2 + max_attempts = int(timeout) // sleep_interval + + done = False + for attempt in range(1, max_attempts): + r = client.get("api/v1/queries/" + job_id) + if r['status'] == "DONE": + done = True + break + else: + if attempt == max_attempts: + break + time.sleep(sleep_interval) + + if not done: + print("failed to find job id " + job_id + " in DONE state within " + str(args.timeout) + " seconds") + print(str(out)) + sys.exit(2) + else: + print_json(data=r) + else: + print_json(data=r) + + +@app.command() +def get( + ctx: typer.Context, + job_id: str = typer.Option(..., "--job-id", "-j", help="The job id of the CQL query") +): + """ + Retrieve the status and results of a CQL query + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/queries/" + job_id) + print_json(data=r) diff --git a/tests/test_queries.py b/tests/test_queries.py new file mode 100644 index 0000000..54fa2a1 --- /dev/null +++ b/tests/test_queries.py @@ -0,0 +1,16 @@ +from tests.helpers.utils import * +from urllib.error import HTTPError + +def test(): + try: + response = cli(["queries", "run", "-f", "tests/test_queries.txt"]) + except HTTPError as e: + status_code = e.response.status_code + if status_code == "409": + print("Query with same CQL is already running") + except: + print("Got an error for which I was not prepared. It's me. 
Not you.") + else: + job_id = response["jobId"] + response = cli(["queries", "get", "-j", job_id]) + assert response["queryDetails"]['jobId'] == job_id, "Should return query with same jobId returned by queries run" diff --git a/tests/test_queries.txt b/tests/test_queries.txt new file mode 100644 index 0000000..d44e612 --- /dev/null +++ b/tests/test_queries.txt @@ -0,0 +1 @@ +tag = "test-service" and custom("foo") = "bar" From 4ce1f2a3552d9831479f18a2255cc84a44f2bf5d Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 8 Nov 2024 15:18:06 -0800 Subject: [PATCH 35/56] Add partial SCIM commands --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/scim.py | 77 +++++++++++++++++++++++++++++++++ cortexapps_cli/cortex_client.py | 3 ++ tests/test_scim.py | 20 +++++++++ 4 files changed, 102 insertions(+) create mode 100644 cortexapps_cli/commands/scim.py create mode 100644 tests/test_scim.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 5d35538..5c85377 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -27,6 +27,7 @@ import cortexapps_cli.commands.plugins as plugins import cortexapps_cli.commands.queries as queries import cortexapps_cli.commands.rest as rest +import cortexapps_cli.commands.scim as scim import cortexapps_cli.commands.teams as teams app = typer.Typer( @@ -53,6 +54,7 @@ app.add_typer(plugins.app, name="plugins") app.add_typer(queries.app, name="queries") app.add_typer(rest.app, name="rest") +app.add_typer(scim.app, name="scim") app.add_typer(teams.app, name="teams") # global options diff --git a/cortexapps_cli/commands/scim.py b/cortexapps_cli/commands/scim.py new file mode 100644 index 0000000..7486c44 --- /dev/null +++ b/cortexapps_cli/commands/scim.py @@ -0,0 +1,77 @@ +from enum import Enum +import json +from rich import print_json +import typer +import urllib.parse + +app = typer.Typer(help="SCIM commands") + +# As of November 2024, sortBy and sortOrder are not supported in our code and result in a 501 error +# Not 
sure how domain is supposed to be used so leaving it out too +# Couldn't get patch, delete and add to work so leaving them out until I can do further research +@app.command() +def list( + ctx: typer.Context, + attributes: str = typer.Option(None, "--attributes", "-a", help="Comma-separated list of attributes to include in response; example: name.familyName,active"), + count: int | None = typer.Option(None, "--count", "-c", help="Return only the first 'count' results"), + excluded_attributes: str = typer.Option(None, "--excluded-attributes", "-e", help="Comma-separated list of attributes to exclude from response; example: name.givenName,emails"), + filter: str = typer.Option(None, "--filter", "-f", help="Filtering only supported for userName, example: 'userName eq anish@cortex.io'"), + start_index: int | None = typer.Option(None, "--start-index", "-s", help="Return items starting with index number, indexing starts with 1") +): + """ + Get users based on provided criteria + """ + + client = ctx.obj["client"] + + params = { + "attributes": attributes, + "excludedAttributes": excluded_attributes, + "filter": filter, + "startIndex": start_index, + "count": count + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("scim/v2/Users", params=urllib.parse.urlencode(params)) + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + attributes: str = typer.Option(None, "--attributes", "-a", help="Comma-separated list of attributes to include in response; example: name.familyName,active"), + excluded_attributes: str = typer.Option(None, "--excluded-attributes", "-e", help="Comma-separated list of attributes to exclude from response; example: name.givenName,emails"), + id: str = typer.Option(..., "--id", "-i", help="SCIM id of user to get"), +): + """ + Gets a user based on id + """ + + client = ctx.obj["client"] + + params = { + "attributes": attributes, + "excludedAttributes": 
excluded_attributes + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("scim/v2/Users/" + id, params=urllib.parse.urlencode(params)) + print_json(data=r) + +# I get a 403 when testing this in my environment, but leaving in because it's syntactically correct +@app.command() +def delete( + ctx: typer.Context, + id: str = typer.Option(..., "--id", "-i", help="SCIM id of user to delete"), +): + """ + Delete a user based on id + """ + + client = ctx.obj["client"] + + r = client.delete("scim/v2/Users/" + id) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index e9be1db..435db79 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -62,6 +62,9 @@ def post(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_res def put(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_response=False, content_type='application/json'): return self.request('PUT', endpoint, data=data, params=params, headers=headers, raw_body=raw_body, raw_response=raw_response, content_type=content_type) + def patch(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_response=False, content_type='application/json'): + return self.request('PATCH', endpoint, data=data, params=params, headers=headers, raw_body=raw_body, raw_response=raw_response, content_type=content_type) + def delete(self, endpoint, data={}, params={}, headers={}, raw_response=False): return self.request('DELETE', endpoint, data=data, params=params, headers=headers, raw_response=raw_response) diff --git a/tests/test_scim.py b/tests/test_scim.py new file mode 100644 index 0000000..2f96ae7 --- /dev/null +++ b/tests/test_scim.py @@ -0,0 +1,20 @@ +from tests.helpers.utils import * + +# These tests work against jeff-sandbox tenant. +# Need a plan to figure out how we can ensure we have SCIM enabled in an environment to not skip these tests. 
# --- tests/test_scim.py ---

def test():
    # The known-good SCIM user provisioned in the sandbox tenant.
    # First make sure it appears in an unfiltered listing.
    response = cli(["scim", "list"])
    assert any(user['userName'] == 'jeff.schnitter@proton.me' for user in response['Resources']), "Should find user jeff.schnitter@proton.me"

    # Narrow the listing with a SCIM filter and remember the user's id.
    response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me"])
    assert response['Resources'][0]['userName'] == 'jeff.schnitter@proton.me', "Should find user jeff.schnitter@proton.me"
    id = response['Resources'][0]['id']

    # Attribute projection: include only name.familyName ...
    response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me", "-a", "name.familyName"])
    assert response['Resources'][0]['name']['familyName'] == 'Schnitter', "Should find family Name"

    # ... then exclude it and verify it is absent.
    response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me", "-e", "name.familyName"])
    assert 'familyName' not in response['Resources'][0]['name'].keys(), "Should not have familyName in response"

    # Finally fetch the same user directly by id.
    response = cli(["scim", "get", "--id", id])
    assert response['id'] == id, "Should find matching id based on query"

# (Patch 36 follows: the cli.py hunk on the next line registers the new
#  scorecards sub-app alongside queries, rest, scim, and teams.)
# --- cortexapps_cli/cli.py (hunk: register the scorecards sub-app) ---
import cortexapps_cli.commands.queries as queries
import cortexapps_cli.commands.rest as rest
import cortexapps_cli.commands.scim as scim
import cortexapps_cli.commands.scorecards as scorecards
import cortexapps_cli.commands.teams as teams

app.add_typer(queries.app, name="queries")
app.add_typer(rest.app, name="rest")
app.add_typer(scim.app, name="scim")
app.add_typer(scorecards.app, name="scorecards")
app.add_typer(teams.app, name="teams")

# --- cortexapps_cli/commands/scorecards.py ---
import json
from rich import print_json
import typer
from typing_extensions import Annotated

import cortexapps_cli.commands.scorecards_commands.exemptions as exemptions

app = typer.Typer(help="Scorecards commands",
        no_args_is_help=True)
app.add_typer(exemptions.app, name="exemptions")

@app.command()
def create(
    ctx: typer.Context,
    input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing YAML representation of scorecard, can be passed as stdin with -, example: -f-")] = None,
    dry_run: bool = typer.Option(False, "--dry-run", "-d", help="When true, this endpoint only validates the descriptor contents and returns any errors or warnings"),
):
    """
    Create or update a Scorecard using the descriptor YAML. The operation is determined by the existence of a Scorecard with the same tag as passed in the descriptor.
    """

    client = ctx.obj["client"]

    params = {
        "dryRun": dry_run
    }

    # remove any params that are None
    params = {k: v for k, v in params.items() if v is not None}

    # Descriptor is posted verbatim as YAML, not JSON.
    client.post("api/v1/scorecards/descriptor", params=params, data=input.read(), content_type="application/yaml;charset=UTF-8")

@app.command()
def delete(
    ctx: typer.Context,
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
):
    """
    Delete scorecard
    """

    client = ctx.obj["client"]

    client.delete("api/v1/scorecards/" + scorecard_tag)

@app.command()
def list(
    ctx: typer.Context,
    show_drafts: bool = typer.Option(False, "--show-drafts", "-s", help="Whether scorecard in draft mode should be included"),
    page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"),
    page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"),
):
    """
    List scorecards
    """

    client = ctx.obj["client"]

    params = {
        "showDrafts": show_drafts
    }

    # remove any params that are None
    params = {k: v for k, v in params.items() if v is not None}

    # fetch_or_get pages through all results when page is None.
    client.fetch_or_get("api/v1/scorecards", page, params=params)

@app.command()
def shield(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
):
    """
    Retrieve scorecard shields.io badge
    """

    client = ctx.obj["client"]

    r = client.get("api/v1/scorecards/" + scorecard_tag + "/entity/" + tag_or_id + "/badge")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
):
    """
    Get scorecard
    """

    client = ctx.obj["client"]

    r = client.get("api/v1/scorecards/" + scorecard_tag)
    print_json(data=r)

@app.command()
def descriptor(
    ctx: typer.Context,
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
):
    """
    Get scorecards YAML descriptor
    """

    client = ctx.obj["client"]

    # Response is raw YAML text, so plain print rather than print_json.
    r = client.get("api/v1/scorecards/" + scorecard_tag + "/descriptor")
    print(r)

@app.command()
def next_steps(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
):
    """
    Retrieve next steps for entity in scorecard
    """

    client = ctx.obj["client"]

    params = {
        "entityTag": tag_or_id
    }

    r = client.get("api/v1/scorecards/" + scorecard_tag + "/next-steps", params=params)
    print_json(data=r)

@app.command()
def scores(
    ctx: typer.Context,
    tag_or_id: str | None = typer.Option(None, "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    # Fix: default was 0, which always pinned the request to the first page and
    # contradicted both the help text ("omit to fetch all pages") and the
    # sibling `list` command.  None lets fetch_or_get page through everything.
    page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"),
    page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"),
):
    """
    Return latest scores for all entities in the Scorecard
    """

    client = ctx.obj["client"]

    params = {
        "entityTag": tag_or_id,
        "page": page,
        "pageSize": page_size
    }

    # remove any params that are None
    params = {k: v for k, v in params.items() if v is not None}

    client.fetch_or_get("api/v1/scorecards/" + scorecard_tag + "/scores", page, params=params)
# --- cortexapps_cli/commands/scorecards_commands/exemptions.py ---
import json
from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="Exemptions commands",
        no_args_is_help=True)

@app.command()
def request(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    days: int = typer.Option(0, "--days", "-d", help="Number of days that rule should be exempt. If not set, rule will be exempt until revoked."),
    reason: str = typer.Option(..., "--reason", "-r", help="Reason for creating exemption"),
    rule_identifier: str = typer.Option(..., "--rule-id", "-ri", help="Identifier of the Scorecard rule to request exemption for"),
):
    """
    Request Scorecard rule exemption
    """

    client = ctx.obj["client"]

    # Request body for the exemption endpoint.
    payload = {
        "days": days,
        "reason": reason,
        "ruleIdentifier": rule_identifier,
    }

    result = client.post(f"api/v1/scorecards/{scorecard_tag}/entity/{tag_or_id}/exemption", data=payload)
    print_json(data=result)

@app.command()
def approve(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    rule_identifier: str = typer.Option(..., "--rule-id", "-ri", help="Identifier of the Scorecard rule to request exemption for"),
):
    """
    Approve Scorecard rule exemption
    """

    client = ctx.obj["client"]

    payload = {"ruleIdentifier": rule_identifier}

    result = client.put(f"api/v1/scorecards/{scorecard_tag}/entity/{tag_or_id}/exemption/approve", data=payload)
    print_json(data=result)

@app.command()
def deny(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    rule_identifier: str = typer.Option(..., "--rule-id", "-ri", help="Identifier of the Scorecard rule to request exemption for"),
):
    """
    Deny Scorecard rule exemption
    """

    client = ctx.obj["client"]

    payload = {"ruleIdentifier": rule_identifier}

    result = client.put(f"api/v1/scorecards/{scorecard_tag}/entity/{tag_or_id}/exemption/deny", data=payload)
    print_json(data=result)

@app.command()
def revoke(
    ctx: typer.Context,
    tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."),
    scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"),
    reason: str = typer.Option(..., "--reason", "-r", help="Reason for creating exemption"),
    rule_identifier: str = typer.Option(..., "--rule-id", "-ri", help="Identifier of the Scorecard rule to request exemption for"),
):
    """
    Revoke Scorecard rule exemption
    """

    client = ctx.obj["client"]

    payload = {
        "reason": reason,
        "ruleIdentifier": rule_identifier,
    }

    result = client.put(f"api/v1/scorecards/{scorecard_tag}/entity/{tag_or_id}/exemption/revoke", data=payload)
    print_json(data=result)


# --- tests/helpers/utils.py (hunk: adds os and unittest.mock to test helpers) ---
import json
import os
import pytest
from typer.testing import CliRunner
from unittest import mock
from tests.helpers.utils import *
import yaml

# Get rule id to be used in exemption tests. Need to revoke any existing rules.
def _get_rule(num):
    """Return the identifier of rule number *num* of the test scorecard."""
    response = cli(["scorecards", "get", "-s", "test-scorecard"])
    # Fix: the previous version read CORTEX_API_KEY and printed it to stdout.
    # Never echo secrets in test output -- they end up in CI logs.
    return response['scorecard']['rules'][num]['identifier']

def test_scorecards():
    cli(["scorecards", "create", "-f", "tests/test_scorecards.yaml"])

    response = cli(["scorecards", "list"])
    assert any(scorecard['tag'] == 'test-scorecard' for scorecard in response['scorecards']), "Should find scorecard with tag test-scorecard"

    response = cli(["scorecards", "shield", "-s", "test-scorecard", "-t", "test-service"])
    assert "img.shields.io" in response['value'], "shields url should be included in string"

    response = cli(["scorecards", "get", "-s", "test-scorecard"])
    assert response['scorecard']['tag'] == "test-scorecard", "JSON response should have scorecard tag"

    response = cli(["scorecards", "descriptor", "-s", "test-scorecard"], return_type=ReturnType.STDOUT)
    assert "Used to test Cortex CLI" in response, "description of scorecard found in descriptor"

    # cannot rely on a scorecard evaluation being complete, so not performing any validation
    cli(["scorecards", "next-steps", "-s", "test-scorecard", "-t", "test-service"])

# # Not sure if we can run this cli right away. Newly-created Scorecard might not be evaluated yet.
# # 2024-05-06, additionally now blocked by CET-8882
# # cli(["scorecards", "scores", "-t", "test-scorecard", "-e", "test-service"])
#
# cli(["scorecards", "scores", "-t", "test-scorecard"])
#
#def test_scorecards_drafts(capsys):
#    cli(["scorecards", "create", "-f", "tests/test_scorecards_draft.yaml"])
#
#    cli(["scorecards", "list", "-s"])
#    out, err = capsys.readouterr()
#
#    out = json.loads(out)
#    assert any(scorecard['tag'] == 'test-scorecard-draft' for scorecard in out['scorecards'])

# Exemptions are requested with the VIEWER key so they are not auto-approved
# (ADMIN-requested exemptions auto-approve), then acted on with the default key.
@pytest.fixture(scope='session')
@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']})
def test_exemption_that_will_be_approved():
    rule_id = _get_rule(0)
    # Fix: removed debug print() calls; the unused response binding is dropped.
    cli(["scorecards", "exemptions", "request", "-s", "test-scorecard", "-t", "test-service", "-r", "test approve", "-ri", rule_id, "-d", "100"])

@pytest.mark.usefixtures('test_exemption_that_will_be_approved')
def test_approve_exemption():
    rule_id = _get_rule(0)
    cli(["scorecards", "exemptions", "approve", "-s", "test-scorecard", "-t", "test-service", "-ri", rule_id])
    cli(["scorecards", "exemptions", "revoke", "-s", "test-scorecard", "-t", "test-service", "-r", "I revoke you", "-ri", rule_id])

@pytest.fixture(scope='session')
@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']})
def test_exemption_that_will_be_denied():
    rule_id = _get_rule(1)
    cli(["scorecards", "exemptions", "request", "-s", "test-scorecard", "-t", "test-service", "-r", "test deny", "-ri", rule_id, "-d", "100"])

@pytest.mark.usefixtures('test_exemption_that_will_be_denied')
def test_deny_exemption():
    rule_id = _get_rule(1)
    cli(["scorecards", "exemptions", "deny", "-s", "test-scorecard", "-t", "test-service", "-r", "I deny, therefore I am", "-ri", rule_id])
a/tests/test_scorecards.yaml b/tests/test_scorecards.yaml new file mode 100644 index 0000000..1edc99d --- /dev/null +++ b/tests/test_scorecards.yaml @@ -0,0 +1,27 @@ +tag: test-scorecard +name: Test Scorecard +description: Used to test Cortex CLI +draft: false +ladder: + name: Default Ladder + levels: + - name: You Made It + rank: 1 + description: "My boring description" + color: 7cf376 +rules: +- title: Is Definitely False + expression: custom("testField") == "100" + weight: 1 + level: You Made It + filter: + category: SERVICE +- title: Has Custom Data + expression: custom("testField") != null + weight: 1 + level: You Made It + filter: + category: SERVICE +filter: + query: 'entity.tag() == "test-service"' + category: SERVICE diff --git a/tests/test_scorecards_draft.yaml b/tests/test_scorecards_draft.yaml new file mode 100644 index 0000000..c16e9c1 --- /dev/null +++ b/tests/test_scorecards_draft.yaml @@ -0,0 +1,19 @@ +tag: test-scorecard-draft +name: Test Scorecard Draft +description: Used to test Cortex CLI +draft: true +ladder: + name: Default Ladder + levels: + - name: You Made It + rank: 1 + description: "\"If you ain't first, you're last. 
-- Ricky Bobby\" -- Scott Mullin" + color: 7cf376 +rules: +- title: Has Custom Data + expression: custom("testField") != null + weight: 1 + level: You Made It +filter: + query: entity_descriptor.info.`x-cortex-tag` = "cli-test-service" + category: SERVICE From 8bceaf1540716c6fa851a155ecbaeeb9f2004e4f Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Tue, 19 Nov 2024 17:00:18 -0800 Subject: [PATCH 37/56] Finish scorecards tests --- .../scorecards_commands/exemptions.py | 4 +- tests/test_scorecards.py | 74 +++++++++++++------ 2 files changed, 56 insertions(+), 22 deletions(-) diff --git a/cortexapps_cli/commands/scorecards_commands/exemptions.py b/cortexapps_cli/commands/scorecards_commands/exemptions.py index 8461029..78f27e2 100644 --- a/cortexapps_cli/commands/scorecards_commands/exemptions.py +++ b/cortexapps_cli/commands/scorecards_commands/exemptions.py @@ -56,6 +56,7 @@ def deny( tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"), rule_identifier: str = typer.Option(..., "--rule-id", "-ri", help="Identifier of the Scorecard rule to request exemption for"), + reason: str = typer.Option(..., "--reason", "-r", help="Reason for creating exemption"), ): """ Deny Scorecard rule exemption @@ -64,7 +65,8 @@ def deny( client = ctx.obj["client"] data = { - "ruleIdentifier": rule_identifier + "ruleIdentifier": rule_identifier, + "reason": reason } r = client.put("api/v1/scorecards/" + scorecard_tag + "/entity/" + tag_or_id + "/exemption/deny", data=data) diff --git a/tests/test_scorecards.py b/tests/test_scorecards.py index 56ff682..a50421f 100644 --- a/tests/test_scorecards.py +++ b/tests/test_scorecards.py @@ -1,12 +1,13 @@ from tests.helpers.utils import * import yaml -# Get rule id to be used in exemption tests. Need to revoke any existing rules. 
-def _get_rule(num): +# Get rule id to be used in exemption tests. +# TODO: check for and revoke any PENDING exemptions. +@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY']}) +def _get_rule(title): response = cli(["scorecards", "get", "-s", "test-scorecard"]) - key = os.environ['CORTEX_API_KEY'] - print("here in _get_rule . . ., CORTEX_API_KEY = " + key) - return response['scorecard']['rules'][num]['identifier'] + rule_id = [rule['identifier'] for rule in response['scorecard']['rules'] if rule['title'] == title] + return rule_id[0] def test_scorecards(): cli(["scorecards", "create", "-f", "tests/test_scorecards.yaml"]) @@ -20,51 +21,82 @@ def test_scorecards(): response = cli(["scorecards", "get", "-s", "test-scorecard"]) assert response['scorecard']['tag'] == "test-scorecard", "JSON response should have scorecard tag" - response = cli(["scorecards", "descriptor", "-s", "test-scorecard"], return_type=ReturnType.STDOUT) assert "Used to test Cortex CLI" in response, "description of scorecard found in descriptor" # cannot rely on a scorecard evaluation being complete, so not performing any validation cli(["scorecards", "next-steps", "-s", "test-scorecard", "-t", "test-service"]) -# + + response = cli(["scorecards", "scores", "-s", "test-scorecard", "-t", "test-service"]) + assert response['scorecardTag'] == "test-scorecard", "Should get valid response that include test-scorecard" + # # Not sure if we can run this cli right away. Newly-created Scorecard might not be evaluated yet. 
# # 2024-05-06, additionally now blocked by CET-8882 # # cli(["scorecards", "scores", "-t", "test-scorecard", "-e", "test-service"]) # # cli(["scorecards", "scores", "-t", "test-scorecard"]) + +def test_scorecards_drafts(): + cli(["scorecards", "create", "-f", "tests/test_scorecards_draft.yaml"]) + + response = cli(["scorecards", "list", "-s"]) + assert any(scorecard['tag'] == 'test-scorecard-draft' for scorecard in response['scorecards']) + + cli(["scorecards", "delete", "-s", "test-scorecard-draft"]) + response = cli(["scorecards", "list", "-s"]) + assert not(any(scorecard['tag'] == 'test-scorecard-draft' for scorecard in response['scorecards'])), "should not find deleted scorecard" + +# Challenges with testing exemptions: +# +# - exemptions require scorecards that have evaluated with failing rules; +# testing assumes no tenanted data, so this condition needs to be created as part of the test # -#def test_scorecards_drafts(capsys): -# cli(["scorecards", "create", "-f", "tests/test_scorecards_draft.yaml"]) +# - there is no public API to force evaluation of a scorecard; can look into possibility of using +# an internal endpoint for this # -# cli(["scorecards", "list", "-s"]) -# out, err = capsys.readouterr() +# - could create a scorecard as part of the test and wait for it to complete, but completion time for +# evaluating a scorecard is non-deterministic and, as experienced with query API tests, completion +# time can be 15 minutes or more, which will increase the time to complete testing by a factor of 5x +# or more # -# out = json.loads(out) -# assert any(scorecard['tag'] == 'test-scorecard-draft' for scorecard in out['scorecards']) +# - exemptions requested by an API key with the Cortex ADMIN role are auto-approved, so the exemption must +# be requested with a key that has non-ADMIN privileges +# +# This means there are dependencies on running a test using a VIEWER role to request the exemption and a +# subsequent test using an ADMIN role to act on the 
exemption +# +# So this is how we'll roll for now . . . +# - Automated tests currently run in known tenants that have the 'test-scorecard' in an evaluated state. +# - So we can semi-reliably count on an evaluated scorecard to exist. @pytest.fixture(scope='session') @mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) def test_exemption_that_will_be_approved(): - rule_id = _get_rule(0) - print("rule_id = " + rule_id) + rule_id = _get_rule("Has Custom Data") + response = cli(["scorecards", "exemptions", "request", "-s", "test-scorecard", "-t", "test-service", "-r", "test approve", "-ri", rule_id, "-d", "100"]) + assert response['exemptionStatus']['status'] == 'PENDING', "exemption state should be PENDING" @pytest.mark.usefixtures('test_exemption_that_will_be_approved') def test_approve_exemption(): - rule_id = _get_rule(0) - print("rule_id = " + rule_id) + rule_id = _get_rule("Has Custom Data") + response = cli(["scorecards", "exemptions", "approve", "-s", "test-scorecard", "-t", "test-service", "-ri", rule_id]) + assert response['exemptions'][0]['exemptionStatus']['status'] == 'APPROVED', "exemption state should be APPROVED" response = cli(["scorecards", "exemptions", "revoke", "-s", "test-scorecard", "-t", "test-service", "-r", "I revoke you", "-ri", rule_id]) + assert response['exemptions'][0]['exemptionStatus']['status'] == 'REJECTED', "exemption state should be REJECTED" @pytest.fixture(scope='session') @mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) def test_exemption_that_will_be_denied(): - rule_id = _get_rule(1) - print("rule_id = " + rule_id) + rule_id = _get_rule("Is Definitely False") + response = cli(["scorecards", "exemptions", "request", "-s", "test-scorecard", "-t", "test-service", "-r", "test deny", "-ri", rule_id, "-d", "100"]) + assert response['exemptionStatus']['status'] == 'PENDING', "exemption state should be PENDING" 
@pytest.mark.usefixtures('test_exemption_that_will_be_denied') def test_deny_exemption(): - rule_id = _get_rule(1) - print("rule_id = " + rule_id) + rule_id = _get_rule("Is Definitely False") + response = cli(["scorecards", "exemptions", "deny", "-s", "test-scorecard", "-t", "test-service", "-r", "I deny, therefore I am", "-ri", rule_id]) + assert response['exemptions'][0]['exemptionStatus']['status'] == 'REJECTED', "exemption state should be REJECTED" From 5bfd05c8df39924346349d2519e381cfa27722fc Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 22 Nov 2024 16:52:29 -0800 Subject: [PATCH 38/56] Add AWS integration --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/integrations.py | 10 + .../commands/integrations_commands/aws.py | 222 ++++++++++++++++++ tests/conftest.py | 27 +++ tests/test_integrations_aws.py | 63 +++++ 5 files changed, 324 insertions(+) create mode 100644 cortexapps_cli/commands/integrations.py create mode 100644 cortexapps_cli/commands/integrations_commands/aws.py create mode 100644 tests/conftest.py create mode 100644 tests/test_integrations_aws.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 107e95b..922ae03 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -29,6 +29,7 @@ import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.scim as scim import cortexapps_cli.commands.scorecards as scorecards +import cortexapps_cli.commands.integrations as integrations import cortexapps_cli.commands.teams as teams app = typer.Typer( @@ -57,6 +58,7 @@ app.add_typer(rest.app, name="rest") app.add_typer(scim.app, name="scim") app.add_typer(scorecards.app, name="scorecards") +app.add_typer(integrations.app, name="integrations") app.add_typer(teams.app, name="teams") # global options diff --git a/cortexapps_cli/commands/integrations.py b/cortexapps_cli/commands/integrations.py new file mode 100644 index 0000000..02227ff --- /dev/null +++ b/cortexapps_cli/commands/integrations.py @@ -0,0 +1,10 @@ 
# --- cortexapps_cli/commands/integrations.py ---
import json
from rich import print_json
import typer
from typing_extensions import Annotated

import cortexapps_cli.commands.integrations_commands.aws as aws

app = typer.Typer(help="Integrations commands",
        no_args_is_help=True)
app.add_typer(aws.app, name="aws")


# --- cortexapps_cli/commands/integrations_commands/aws.py ---
import json
from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="AWS commands",
        no_args_is_help=True)

# Make this a common client function?
#
# typer cannot declare list[Tuple[str, str]] options ("List types with complex
# sub-types are not currently supported"), so repeated key=value strings are
# parsed in an option callback instead.
# borrowed from https://github.com/fastapi/typer/issues/387
def _parse_key_value(values):
    """Parse repeated 'accountId=role' strings into API payload dicts."""
    if values is None:
        return []
    result = []
    for value in values:
        account, role = value.split('=')
        result.append({"accountId": account, "role": role})
    return result

def _parse_key_value_types(values):
    """Parse repeated 'type=enabled' strings into API payload dicts."""
    if values is None:
        return []
    result = []
    for value in values:
        type_name, enabled = value.split('=')
        result.append({"type": type_name, "enabled": enabled})
    return result

@app.command()
def add(
    ctx: typer.Context,
    account_id: str = typer.Option(..., "--account-id", "-a", help="The account ID for the AWS account"),
    role: str = typer.Option(..., "--role", "-r", help="The IAM role Cortex would be assuming"),
):
    """
    Add a single configuration
    """

    client = ctx.obj["client"]

    data = {
        "accountId": account_id,
        "role": role
    }

    r = client.post("api/v1/aws/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    account_id: str = typer.Option(..., "--account-id", "-a", help="The account ID for the AWS account"),
):
    """
    Delete a configuration
    """

    client = ctx.obj["client"]

    # Fix: previously referenced undefined name 'accountId' (NameError at
    # runtime); the parameter is 'account_id'.
    r = client.delete("api/v1/aws/configurations/" + account_id)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """

    client = ctx.obj["client"]

    r = client.delete("api/v1/aws/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    account_id: str = typer.Option(..., "--account-id", "-a", help="The account ID for the AWS account"),
):
    """
    Get a configuration
    """

    client = ctx.obj["client"]

    # Fix: same undefined 'accountId' NameError as in delete().
    r = client.get("api/v1/aws/configurations/" + account_id)
    print_json(data=r)

@app.command()
def get_all(
    ctx: typer.Context,
):
    """
    Get all configurations
    """

    client = ctx.obj["client"]

    r = client.get("api/v1/aws/configurations")
    print_json(data=r)


@app.command()
def update(
    ctx: typer.Context,
    configurations: list[str] | None = typer.Option(None, "--configurations", "-c", callback=_parse_key_value, help="List of account=role pairs (only if file input not provided."),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Update configurations
    """

    client = ctx.obj["client"]

    # File input takes precedence; per the option help, --configurations is
    # only honored when no file is given.  (The previous version also kept an
    # unused 'series_data' local and would KeyError if a file was combined
    # with -c flags and lacked a "configurations" key.)
    if file_input:
        data = json.loads(file_input.read())
    else:
        data = {"configurations": list(configurations or [])}

    r = client.put("api/v1/aws/configurations", data=data)
    print_json(data=r)
# --- cortexapps_cli/commands/integrations_commands/aws.py (continued) ---

@app.command()
def validate(
    ctx: typer.Context,
    account_id: str = typer.Option(..., "--account-id", "-a", help="The account ID for the AWS account"),
):
    """
    Validate a configuration
    """

    client = ctx.obj["client"]

    # Fix: was 'client.post("api/v1/aws/configurations/validate" + accountId)'
    # -- an undefined name AND a missing path separator.  The endpoint mocked
    # by tests/test_integrations_aws.py is .../configurations/validate/<id>.
    r = client.post("api/v1/aws/configurations/validate/" + account_id)
    print_json(data=r)

@app.command()
def validate_all(
    ctx: typer.Context,
):
    """
    Validate all configurations
    """

    client = ctx.obj["client"]

    # Fix: previously POSTed to api/v1/aws/configurations, which is the *add*
    # endpoint.  The validate-all endpoint exercised by the tests is
    # .../configurations/all/validate.
    r = client.post("api/v1/aws/configurations/all/validate")
    print_json(data=r)

@app.command()
def list_types(
    ctx: typer.Context,
    include_disabled: bool = typer.Option(False, "--include-disabled", "-i", help="When true, includes all AWS types supported"),
    page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"),
    page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results")
):
    """
    List AWS types that have been imported
    """

    client = ctx.obj["client"]

    params = {
        "includeDisabled": include_disabled,
        "page": page,
        "pageSize": page_size
    }

    # remove any params that are None
    params = {k: v for k, v in params.items() if v is not None}

    r = client.get("api/v1/aws/types", params=params)
    print_json(data=r)

@app.command()
def update_types(
    ctx: typer.Context,
    types: list[str] | None = typer.Option(None, "--types", "-t", callback=_parse_key_value_types, help="List of type=True|False pairs (only if file input not provided."),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON File containing AWS types that should be discovered and imported into catalog; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Update configured AWS types
    """

    client = ctx.obj["client"]

    # File input takes precedence; otherwise --types is required.  Fix:
    # removed a leftover debug print(data) and the unused 'series_data' local.
    if file_input:
        data = json.loads(file_input.read())
    else:
        if not types:
            raise typer.BadParameter("One of --types or --file must be provided.")
        data = {"types": list(types)}

    r = client.put("api/v1/aws/types", data=data)
    print_json(data=r)


# --- tests/conftest.py ---
import pytest
import inspect

#@pytest.mark.trylast
@pytest.hookimpl(trylast=True)
def pytest_configure(config):
    # Register the plugin that echoes each test's docstring in verbose runs.
    terminal_reporter = config.pluginmanager.getplugin('terminalreporter')
    config.pluginmanager.register(TestDescriptionPlugin(terminal_reporter), 'testdescription')

class TestDescriptionPlugin:
    """Terminal-reporter plugin that prints the current test's docstring."""

    def __init__(self, terminal_reporter):
        self.terminal_reporter = terminal_reporter
        self.desc = None

    def pytest_runtest_protocol(self, item):
        # Capture the docstring of the test about to run.
        self.desc = inspect.getdoc(item.obj)

    @pytest.hookimpl(hookwrapper=True, tryfirst=True)
    def pytest_runtest_logstart(self, nodeid, location):
        # Only emit the description when pytest is running verbosely.
        if self.terminal_reporter.verbosity == 0:
            yield
        else:
            self.terminal_reporter.write('\n')
            yield
            if self.desc:
                self.terminal_reporter.write(f'\n{self.desc} ')
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_newrelic_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_aws_add(): + """ + Test adding a single configuration + """ + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", json=[{'accountId': 123, 'role:': 'test'}], status=200) + response = cli(["integrations", "aws", "add", "-a", "123", "-r", "test"]) + assert response[0]['accountId'] == 123, "accountId should be returned in response" + +@responses.activate +def test_integrations_aws_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/123456", status=200) + cli(["integrations", "aws", "delete", "-a", "123456"]) + +@responses.activate +def test_integrations_aws_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) + cli(["integrations", "aws", "delete-all"]) + +@responses.activate +def test_integrations_aws_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/123456", status=200) + cli(["integrations", "aws", "get", "-a", "123456"]) + +@responses.activate +def test_integrations_aws_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) + cli(["integrations", "aws", "get-all"]) + +@responses.activate +def test_integrations_aws_update(): + #f = _dummy_file(tmp_path) + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) + cli(["integrations", "aws", "update", "-c", "123=test", "-c", "456=test1"]) + +@responses.activate +def test_integrations_aws_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/validate/123456", status=200) + cli(["integrations", "aws", "validate", "-a", "123456"]) + +@responses.activate +def test_integrations_aws_validate_all(): + 
responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/all/validate", status=200) + cli(["integrations", "aws", "validate-all"]) + +@responses.activate +def test_integrations_list_types(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/types", json=[{'total': 1, 'types': [{'type': 'AWS::EC2::Instance'}]}], status=200) + cli(["integrations", "aws", "validate-all"]) + +@responses.activate +def test_integrations_aws_update_types(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/types", json=[{'types': [{'type': 'AWS::EC2::Instance'}]}], status=200) + cli(["integrations", "aws", "update-types", "-t", "AWS::EC2::Instance=true", "-t", "AWS::ECS::VPC=false"], ReturnType.RAW) From 31e308c7341c7aa3f12c969d5915cc9cb69bffd2 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 27 Nov 2024 11:37:06 -0800 Subject: [PATCH 39/56] Azure Resources --- cortexapps_cli/commands/integrations.py | 2 + .../commands/integrations_commands/aws.py | 2 +- .../integrations_commands/azure_resources.py | 259 ++++++++++++++++++ tests/helpers/utils.py | 1 + tests/test_integrations_aws.py | 4 +- tests/test_integrations_azure_resources.py | 69 +++++ 6 files changed, 334 insertions(+), 3 deletions(-) create mode 100644 cortexapps_cli/commands/integrations_commands/azure_resources.py create mode 100644 tests/test_integrations_azure_resources.py diff --git a/cortexapps_cli/commands/integrations.py b/cortexapps_cli/commands/integrations.py index 02227ff..44cd4b6 100644 --- a/cortexapps_cli/commands/integrations.py +++ b/cortexapps_cli/commands/integrations.py @@ -4,7 +4,9 @@ from typing_extensions import Annotated import cortexapps_cli.commands.integrations_commands.aws as aws +import cortexapps_cli.commands.integrations_commands.azure_resources as azure_resources app = typer.Typer(help="Integrations commands", no_args_is_help=True) app.add_typer(aws.app, name="aws") +app.add_typer(azure_resources.app, 
name="azure-resources") diff --git a/cortexapps_cli/commands/integrations_commands/aws.py b/cortexapps_cli/commands/integrations_commands/aws.py index eaa5b85..92bcd04 100644 --- a/cortexapps_cli/commands/integrations_commands/aws.py +++ b/cortexapps_cli/commands/integrations_commands/aws.py @@ -166,7 +166,7 @@ def validate_all( @app.command() def list_types( ctx: typer.Context, - include_disabled: bool = typer.Option(False, "--include-disabled", "-i", help="When true, includes all AWS types supported"), + include_disabled: bool = typer.Option(False, "--include-disabled", "-i", help="When true, includes all types supported"), page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results") ): diff --git a/cortexapps_cli/commands/integrations_commands/azure_resources.py b/cortexapps_cli/commands/integrations_commands/azure_resources.py new file mode 100644 index 0000000..70b51b1 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/azure_resources.py @@ -0,0 +1,259 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Azure Resources commands", + no_args_is_help=True) + +# Make this a common client function? + +# Need a helper function to parse custom_data. 
+# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: +# AssertionError: List types with complex sub-types are not currently supported +# +# borrowed from https://github.com/fastapi/typer/issues/387 +def _parse_key_value(values): + if values is None: + return [] + result = [] + for value in values: + a, r = value.split('=') + result.append({"accountId": a, "role": r}) + return result + +def _parse_key_value_types(values): + if values is None: + return [] + result = [] + for value in values: + a, r = value.split('=') + result.append({"type": a, "enabled": r}) + return result + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + organization_slug: str = typer.Option(..., "--organization-slug", "-o", help="Identifier for organization"), + personal_access_token: str = typer.Option(..., "--pat", "-p", help="Personal Access Token"), + username: str = typer.Option(..., "--username", "-u", help="Username"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or is_default or host or organization_slug or personal_access_token or file_input: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "host": host, + "isDefault": is_default, + "organizationSlug": organization_slug, + "personalAccessToken": personal_access_token, + "username": username + } + + 
# remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/azure-resources/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.post("api/v1/azure-resources/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/azure-resources/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/azure-resources/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-resources/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-resources/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-resources/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., 
"--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/azure-resources/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-resources/configuration/validate/" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-resources/configuration/validate") + print_json(data=r) + +@app.command() +def list_types( + ctx: typer.Context, + include_disabled: bool = typer.Option(False, "--include-disabled", "-i", help="When true, includes all types supported"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results") +): + """ + List Azure Resources types that have been imported + """ + + client = ctx.obj["client"] + + params = { + "includeDisabled": include_disabled, + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("api/v1/azure-resources/types", params=params) + print_json(data=r) + +@app.command() +def update_types( + ctx: typer.Context, + types: list[str] | None = typer.Option(None, "--types", "-t", callback=_parse_key_value_types, help="List of type=True|False pairs (only if file input not provided."), + file_input: Annotated[typer.FileText, 
typer.Option("--file", "-f", help="JSON File containing types that should be discovered and imported into catalog; can be passed as stdin with -, example: -f-")] = None, +): + """ + Update configured Azure Resources types + """ + + client = ctx.obj["client"] + + data = { + "types": [] + } + series_data = { + "types": types + } + + if file_input: + data = json.loads("".join([line for line in file_input])) + else: + if not types: + raise typer.BadParameter("One of --types or --file must be provided.") + + if types: + for item in types: + data["types"].append(item) + + print(data) + r = client.put("api/v1/azure-resources/types", data=data) + print_json(data=r) diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py index 1c6330b..5e4453e 100644 --- a/tests/helpers/utils.py +++ b/tests/helpers/utils.py @@ -8,6 +8,7 @@ import pytest from typer.testing import CliRunner from unittest import mock +import responses runner = CliRunner() diff --git a/tests/test_integrations_aws.py b/tests/test_integrations_aws.py index 6b1f417..cddacf3 100644 --- a/tests/test_integrations_aws.py +++ b/tests/test_integrations_aws.py @@ -54,8 +54,8 @@ def test_integrations_aws_validate_all(): @responses.activate def test_integrations_list_types(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/types", json=[{'total': 1, 'types': [{'type': 'AWS::EC2::Instance'}]}], status=200) - cli(["integrations", "aws", "validate-all"]) + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/types", json=[{'total': 1, 'types': [{'type': 'AWS::EC2::Instance'}]}], status=200) + cli(["integrations", "aws", "list-types"]) @responses.activate def test_integrations_aws_update_types(): diff --git a/tests/test_integrations_azure_resources.py b/tests/test_integrations_azure_resources.py new file mode 100644 index 0000000..9892db3 --- /dev/null +++ b/tests/test_integrations_azure_resources.py @@ -0,0 +1,69 @@ +from tests.helpers.utils import * + +# Since responses 
are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. +def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_azure_resources_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_azure_resources_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration", json=[{'accountId': 123, 'role:': 'test'}], status=200) + cli(["integrations", "azure-resources", "add", "-a", "myAlias", "-h", "my.host.com", "-o", "my-slug", "-p", "123456", "-u", "steph.curry"]) + +@responses.activate +def test_integrations_azure_resources_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configurations", json={}, status=200) + cli(["integrations", "azure-resources", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_azure_resources_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/test", status=200) + cli(["integrations", "azure-resources", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_azure_resources_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configurations", status=200) + cli(["integrations", "azure-resources", "delete-all"]) + +@responses.activate +def test_integrations_azure_resources_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/test", json={}, status=200) + cli(["integrations", "azure-resources", "get", "-a", "test"]) + +@responses.activate +def test_integrations_azure_resources_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configurations", json={}, status=200) + cli(["integrations", "azure-resources", "get-all"]) + +@responses.activate +def 
test_integrations_azure_resources_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/default-configuration", json={}, status=200) + cli(["integrations", "azure-resources", "get-default"]) + +@responses.activate +def test_integrations_azure_resources_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/test", json={}, status=200) + cli(["integrations", "azure-resources", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_azure_resources_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/validate/test", json={}, status=200) + cli(["integrations", "azure-resources", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_azure_resources_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/validate", json={}, status=200) + cli(["integrations", "azure-resources", "validate-all"]) + +@responses.activate +def test_integrations_list_types(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/types", json={}, status=200) + cli(["integrations", "azure-resources", "list-types"]) + +@responses.activate +def test_integrations_azure_resources_update_types(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/types", json={}, status=200) + cli(["integrations", "azure-resources", "update-types", "-t", "microsoft.insights/workbooks=true", "-t", "microsoft.resources/subscriptions=false"], ReturnType.RAW) From b74653cdc6d3ac226258baa5421775900a38a1aa Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 27 Nov 2024 11:41:59 -0800 Subject: [PATCH 40/56] Azure DevOps --- cortexapps_cli/commands/integrations.py | 2 + .../integrations_commands/azure_devops.py | 175 ++++++++++++++++++ tests/test_integrations_azure_devops.py | 59 ++++++ 3 files 
changed, 236 insertions(+) create mode 100644 cortexapps_cli/commands/integrations_commands/azure_devops.py create mode 100644 tests/test_integrations_azure_devops.py diff --git a/cortexapps_cli/commands/integrations.py b/cortexapps_cli/commands/integrations.py index 44cd4b6..23b2210 100644 --- a/cortexapps_cli/commands/integrations.py +++ b/cortexapps_cli/commands/integrations.py @@ -5,8 +5,10 @@ import cortexapps_cli.commands.integrations_commands.aws as aws import cortexapps_cli.commands.integrations_commands.azure_resources as azure_resources +import cortexapps_cli.commands.integrations_commands.azure_devops as azure_devops app = typer.Typer(help="Integrations commands", no_args_is_help=True) app.add_typer(aws.app, name="aws") app.add_typer(azure_resources.app, name="azure-resources") +app.add_typer(azure_devops.app, name="azure-devops") diff --git a/cortexapps_cli/commands/integrations_commands/azure_devops.py b/cortexapps_cli/commands/integrations_commands/azure_devops.py new file mode 100644 index 0000000..7e58fc0 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/azure_devops.py @@ -0,0 +1,175 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Azure Devops commands", + no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + organization_slug: str = typer.Option(..., "--organization-slug", "-o", help="Identifier for organization"), + personal_access_token: str = typer.Option(..., "--pat", "-p", help="Personal Access Token"), + username: str = typer.Option(..., "--username", "-u", help="Username"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file 
containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or is_default or host or organization_slug or personal_access_token or file_input: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "host": host, + "isDefault": is_default, + "organizationSlug": organization_slug, + "personalAccessToken": personal_access_token, + "username": username + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/azure-devops/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.post("api/v1/azure-devops/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/azure-devops/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/azure-devops/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the 
configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-devops/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-devops/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-devops/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/azure-devops/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-devops/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-devops/configurations") + print_json(data=r) diff --git a/tests/test_integrations_azure_devops.py b/tests/test_integrations_azure_devops.py new file mode 100644 index 0000000..8f53728 --- /dev/null +++ b/tests/test_integrations_azure_devops.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid 
input files. +def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_azure_devops_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_azure_devops_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration", json=[{'accountId': 123, 'role:': 'test'}], status=200) + cli(["integrations", "azure-devops", "add", "-a", "myAlias", "-h", "my.host.com", "-o", "my-slug", "-p", "123456", "-u", "steph.curry"]) + +@responses.activate +def test_integrations_azure_devops_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configurations", json={}, status=200) + cli(["integrations", "azure-devops", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_azure_devops_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/test", status=200) + cli(["integrations", "azure-devops", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_azure_devops_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configurations", status=200) + cli(["integrations", "azure-devops", "delete-all"]) + +@responses.activate +def test_integrations_azure_devops_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/test", json={}, status=200) + cli(["integrations", "azure-devops", "get", "-a", "test"]) + +@responses.activate +def test_integrations_azure_devops_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configurations", json={}, status=200) + cli(["integrations", "azure-devops", "get-all"]) + +@responses.activate +def test_integrations_azure_devops_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/default-configuration", json={}, status=200) + 
cli(["integrations", "azure-devops", "get-default"]) + +@responses.activate +def test_integrations_azure_devops_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/test", json={}, status=200) + cli(["integrations", "azure-devops", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_azure_devops_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/validate/test", json={}, status=200) + cli(["integrations", "azure-devops", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_azure_devops_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configuration/validate", json={}, status=200) + cli(["integrations", "azure-devops", "validate-all"]) From 590b93f8a52c0b4cc7a8b5adb8ecf18698936efc Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 27 Nov 2024 12:43:08 -0800 Subject: [PATCH 41/56] CircleCI --- cortexapps_cli/commands/integrations.py | 2 + .../integrations_commands/azure_devops.py | 2 +- .../integrations_commands/azure_resources.py | 2 +- .../integrations_commands/circleci.py | 171 ++++++++++++++++++ tests/test_integrations_circleci.py | 59 ++++++ 5 files changed, 234 insertions(+), 2 deletions(-) create mode 100644 cortexapps_cli/commands/integrations_commands/circleci.py create mode 100644 tests/test_integrations_circleci.py diff --git a/cortexapps_cli/commands/integrations.py b/cortexapps_cli/commands/integrations.py index 23b2210..2206d63 100644 --- a/cortexapps_cli/commands/integrations.py +++ b/cortexapps_cli/commands/integrations.py @@ -6,9 +6,11 @@ import cortexapps_cli.commands.integrations_commands.aws as aws import cortexapps_cli.commands.integrations_commands.azure_resources as azure_resources import cortexapps_cli.commands.integrations_commands.azure_devops as azure_devops +import cortexapps_cli.commands.integrations_commands.circleci as 
circleci app = typer.Typer(help="Integrations commands", no_args_is_help=True) app.add_typer(aws.app, name="aws") app.add_typer(azure_resources.app, name="azure-resources") app.add_typer(azure_devops.app, name="azure-devops") +app.add_typer(circleci.app, name="circleci") diff --git a/cortexapps_cli/commands/integrations_commands/azure_devops.py b/cortexapps_cli/commands/integrations_commands/azure_devops.py index 7e58fc0..0956a9d 100644 --- a/cortexapps_cli/commands/integrations_commands/azure_devops.py +++ b/cortexapps_cli/commands/integrations_commands/azure_devops.py @@ -24,7 +24,7 @@ def add( client = ctx.obj["client"] if file_input: - if alias or is_default or host or organization_slug or personal_access_token or file_input: + if alias or is_default or host or organization_slug or personal_access_token or username: raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") data = json.loads("".join([line for line in file_input])) else: diff --git a/cortexapps_cli/commands/integrations_commands/azure_resources.py b/cortexapps_cli/commands/integrations_commands/azure_resources.py index 70b51b1..d606692 100644 --- a/cortexapps_cli/commands/integrations_commands/azure_resources.py +++ b/cortexapps_cli/commands/integrations_commands/azure_resources.py @@ -50,7 +50,7 @@ def add( client = ctx.obj["client"] if file_input: - if alias or is_default or host or organization_slug or personal_access_token or file_input: + if alias or is_default or host or organization_slug or personal_access_token or username: raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") data = json.loads("".join([line for line in file_input])) else: diff --git a/cortexapps_cli/commands/integrations_commands/circleci.py b/cortexapps_cli/commands/integrations_commands/circleci.py new file mode 100644 index 0000000..cc71503 --- /dev/null +++ 
b/cortexapps_cli/commands/integrations_commands/circleci.py @@ -0,0 +1,171 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="CircleCI commands", + no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or host: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/circleci/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.post("api/v1/circleci/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str 
= typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/circleci/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/circleci/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/circleci/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/circleci/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/circleci/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/circleci/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/circleci/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, 
from tests.helpers.utils import *

# Responses are mocked and the CLI does no payload validation (the API does),
# but add-multiple parses the file as JSON, so the fixture must be valid JSON.
def _dummy_file(tmp_path):
    f = tmp_path / "test_integrations_circle_ci_add.json"
    f.write_text("[]")
    return f

@responses.activate
def test_integrations_circle_ci_add():
    responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration", json={}, status=200)
    cli(["integrations", "circleci", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"])

@responses.activate
def test_integrations_circle_ci_add_multiple(tmp_path):
    f = _dummy_file(tmp_path)
    # The command issues PUT, not POST, so mock the matching verb.
    responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configurations", json={}, status=200)
    cli(["integrations", "circleci", "add-multiple", "-f", str(f)])

@responses.activate
def test_integrations_circle_ci_delete():
    responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/test", status=200)
    cli(["integrations", "circleci", "delete", "-a", "test"])

@responses.activate
def test_integrations_circle_ci_delete_all():
    responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configurations", status=200)
    cli(["integrations", "circleci", "delete-all"])

@responses.activate
def test_integrations_circle_ci_get():
    responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/test", json={}, status=200)
    cli(["integrations", "circleci", "get", "-a", "test"])

@responses.activate
def test_integrations_circle_ci_get_all():
    responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configurations", json={}, status=200)
    cli(["integrations", "circleci", "get-all"])

@responses.activate
def test_integrations_circle_ci_get_default():
    responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/default-configuration", json={}, status=200)
    cli(["integrations", "circleci", "get-default"])

@responses.activate
def test_integrations_circle_ci_update():
    responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/test", json={}, status=200)
    cli(["integrations", "circleci", "update", "-a", "test", "-i"])

@responses.activate
def test_integrations_circle_ci_validate():
    responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/validate/test", json={}, status=200)
    cli(["integrations", "circleci", "validate", "-a", "test"])

@responses.activate
def test_integrations_circle_ci_validate_all():
    responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configuration/validate", json={}, status=200)
    cli(["integrations", "circleci", "validate-all"])
# Parent Typer app that aggregates one sub-app per supported integration.
import typer

import cortexapps_cli.commands.integrations_commands.azure_resources as azure_resources
import cortexapps_cli.commands.integrations_commands.azure_devops as azure_devops
import cortexapps_cli.commands.integrations_commands.circleci as circleci
import cortexapps_cli.commands.integrations_commands.coralogix as coralogix
import cortexapps_cli.commands.integrations_commands.datadog as datadog
import cortexapps_cli.commands.integrations_commands.github as github
import cortexapps_cli.commands.integrations_commands.gitlab as gitlab
import cortexapps_cli.commands.integrations_commands.incidentio as incidentio
import cortexapps_cli.commands.integrations_commands.launchdarkly as launchdarkly
import cortexapps_cli.commands.integrations_commands.newrelic as newrelic
import cortexapps_cli.commands.integrations_commands.pagerduty as pagerduty
import cortexapps_cli.commands.integrations_commands.prometheus as prometheus
import cortexapps_cli.commands.integrations_commands.sonarqube as sonarqube

app = typer.Typer(help="Integrations commands",
        no_args_is_help=True)

# Register each integration under its CLI name.
app.add_typer(azure_resources.app, name="azure-resources")
app.add_typer(azure_devops.app, name="azure-devops")
app.add_typer(circleci.app, name="circleci")
app.add_typer(coralogix.app, name="coralogix")
app.add_typer(datadog.app, name="datadog")
app.add_typer(github.app, name="github")
app.add_typer(gitlab.app, name="gitlab")
app.add_typer(incidentio.app, name="incidentio")
app.add_typer(launchdarkly.app, name="launchdarkly")
app.add_typer(newrelic.app, name="newrelic")
app.add_typer(pagerduty.app, name="pagerduty")
app.add_typer(prometheus.app, name="prometheus")
app.add_typer(sonarqube.app, name="sonarqube")
name="sonarqube") diff --git a/cortexapps_cli/commands/integrations_commands/coralogix.py b/cortexapps_cli/commands/integrations_commands/coralogix.py new file mode 100644 index 0000000..a066c06 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/coralogix.py @@ -0,0 +1,180 @@ +from enum import Enum +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Coralogix commands", + no_args_is_help=True) + +class Region(str, Enum): + US1 = "US1" + US2 = "US1" + EU1 = "US1" + EU2 = "US1" + AP1 = "AP1" + AP2 = "AP2" + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + region: Region = typer.Option(..., "--region", "-r", help="Region of configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or region: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "isDefault": is_default, + "region": region + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/coralogix/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file 
containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/coralogix/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/coralogix/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/coralogix/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/coralogix/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/coralogix/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/coralogix/configuration/" + alias, data=data) + 
# Typer sub-app for the Cortex Datadog integration configuration endpoints.
from enum import Enum
import json

from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="Datadog commands",
        no_args_is_help=True)

# Shared base path for every Datadog integration endpoint below.
_BASE = "api/v1/datadog"

class Region(str, Enum):
    US1 = "US1"
    US3 = "US3"
    US5 = "US5"
    US1_FED = "US1_FED"
    EU1 = "EU1"

@app.command()
def add(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
    api_key: str = typer.Option(..., "--api-key", "-api", help="API key"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
    region: Region = typer.Option(..., "--region", "-r", help="Region of configuration"),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add a single configuration
    """

    client = ctx.obj["client"]

    if file_input:
        # Fixed: the body previously referenced an undefined "host" (copied from a
        # host-based module) which raised NameError; this module takes "region".
        if alias or api_key or is_default or region:
            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration options")
        data = json.load(file_input)
    else:
        data = {
            "alias": alias,
            "apiKey": api_key,
            "isDefault": is_default,
            "region": region,
        }

    # remove any data elements that are None (defensive; all fields above are set)
    data = {k: v for k, v in data.items() if v is not None}

    r = client.post(_BASE + "/configuration", data=data)
    print_json(data=r)

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """

    client = ctx.obj["client"]

    data = json.load(file_input)

    # Fixed endpoint: was PUT api/v1/aws/configurations (copy/paste from the AWS module).
    r = client.put(_BASE + "/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Delete a configuration
    """

    client = ctx.obj["client"]

    r = client.delete(_BASE + "/configuration/" + alias)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """

    client = ctx.obj["client"]

    r = client.delete(_BASE + "/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Get a configuration
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/configuration/" + alias)
    print_json(data=r)

@app.command()
def get_all(
    ctx: typer.Context,
):
    """
    Get all configurations
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/configurations")
    print_json(data=r)

@app.command()
def get_default(
    ctx: typer.Context,
):
    """
    Get default configuration
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/default-configuration")
    print_json(data=r)

@app.command()
def update(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
):
    """
    Update a configuration
    """

    client = ctx.obj["client"]

    data = {
        "alias": alias,
        "isDefault": is_default
    }

    r = client.put(_BASE + "/configuration/" + alias, data=data)
    print_json(data=r)

@app.command()
def validate(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Validate a configuration
    """

    client = ctx.obj["client"]

    # Fixed path: was "configurations/validate" + alias (missing "/").
    r = client.post(_BASE + "/configuration/validate/" + alias)
    print_json(data=r)

@app.command()
def validate_all(
    ctx: typer.Context,
):
    """
    Validate all configurations
    """

    client = ctx.obj["client"]

    # Fixed path: was POST on the plain list endpoint.
    r = client.post(_BASE + "/configuration/validate")
    print_json(data=r)
# Typer sub-app for the Cortex GitHub integration configuration endpoints.
import json

from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="GitHub commands",
        no_args_is_help=True)

# Shared base path for every GitHub integration endpoint below.
_BASE = "api/v1/github"

@app.command()
def add(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
    api_key: str = typer.Option(..., "--api-key", "-api", help="API key"),
    host: str = typer.Option(None, "--host", "-h", help="Optional host name"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add a single configuration
    """

    client = ctx.obj["client"]

    if file_input:
        if alias or api_key or is_default or host:
            # Fixed error text: previously referred to "custom event definition".
            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration options")
        data = json.load(file_input)
    else:
        data = {
            "alias": alias,
            "apiKey": api_key,
            "host": host,
            "isDefault": is_default,
        }

    # remove any data elements that are None - can only be host
    data = {k: v for k, v in data.items() if v is not None}

    r = client.post(_BASE + "/configuration", data=data)
    print_json(data=r)

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """

    client = ctx.obj["client"]

    data = json.load(file_input)

    # Fixed endpoint: was PUT api/v1/aws/configurations (copy/paste from the AWS module).
    r = client.put(_BASE + "/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Delete a configuration
    """

    client = ctx.obj["client"]

    r = client.delete(_BASE + "/configuration/" + alias)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """

    client = ctx.obj["client"]

    r = client.delete(_BASE + "/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Get a configuration
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/configuration/" + alias)
    print_json(data=r)

@app.command()
def get_all(
    ctx: typer.Context,
):
    """
    Get all configurations
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/configurations")
    print_json(data=r)

@app.command()
def get_default(
    ctx: typer.Context,
):
    """
    Get default configuration
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/default-configuration")
    print_json(data=r)

@app.command()
def update(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
):
    """
    Update a configuration
    """

    client = ctx.obj["client"]

    data = {
        "alias": alias,
        "isDefault": is_default
    }

    r = client.put(_BASE + "/configuration/" + alias, data=data)
    print_json(data=r)

@app.command()
def validate(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Validate a configuration
    """

    client = ctx.obj["client"]

    # Fixed path: was "configurations/validate" + alias (missing "/").
    r = client.post(_BASE + "/configuration/validate/" + alias)
    print_json(data=r)

@app.command()
def validate_all(
    ctx: typer.Context,
):
    """
    Validate all configurations
    """

    client = ctx.obj["client"]

    # Fixed path: was POST on the plain list endpoint.
    r = client.post(_BASE + "/configuration/validate")
    print_json(data=r)
# Typer sub-app for the Cortex GitLab integration configuration endpoints.
import json

from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="GitLab commands",
        no_args_is_help=True)

# Shared base path for every GitLab integration endpoint below.
_BASE = "api/v1/gitlab"

@app.command()
def add(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
    api_key: str = typer.Option(..., "--api-key", "-api", help="API key"),
    host: str = typer.Option(None, "--host", "-h", help="Optional host name"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add a single configuration
    """

    client = ctx.obj["client"]

    if file_input:
        if alias or api_key or is_default or host:
            # Fixed error text: previously referred to "custom event definition".
            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration options")
        data = json.load(file_input)
    else:
        data = {
            "alias": alias,
            "apiKey": api_key,
            "host": host,
            "isDefault": is_default,
        }

    # remove any data elements that are None - can only be host
    data = {k: v for k, v in data.items() if v is not None}

    r = client.post(_BASE + "/configuration", data=data)
    print_json(data=r)

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """

    client = ctx.obj["client"]

    data = json.load(file_input)

    # Fixed endpoint: was PUT api/v1/aws/configurations (copy/paste from the AWS module).
    r = client.put(_BASE + "/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Delete a configuration
    """

    client = ctx.obj["client"]

    r = client.delete(_BASE + "/configuration/" + alias)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """

    client = ctx.obj["client"]

    r = client.delete(_BASE + "/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Get a configuration
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/configuration/" + alias)
    print_json(data=r)

@app.command()
def get_all(
    ctx: typer.Context,
):
    """
    Get all configurations
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/configurations")
    print_json(data=r)

@app.command()
def get_default(
    ctx: typer.Context,
):
    """
    Get default configuration
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/default-configuration")
    print_json(data=r)

@app.command()
def update(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
):
    """
    Update a configuration
    """

    client = ctx.obj["client"]

    data = {
        "alias": alias,
        "isDefault": is_default
    }

    r = client.put(_BASE + "/configuration/" + alias, data=data)
    print_json(data=r)

@app.command()
def validate(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Validate a configuration
    """

    client = ctx.obj["client"]

    # Fixed path: was "configurations/validate" + alias (missing "/").
    r = client.post(_BASE + "/configuration/validate/" + alias)
    print_json(data=r)

@app.command()
def validate_all(
    ctx: typer.Context,
):
    """
    Validate all configurations
    """

    client = ctx.obj["client"]

    # Fixed path: was POST on the plain list endpoint.
    r = client.post(_BASE + "/configuration/validate")
    print_json(data=r)
# Typer sub-app for the Cortex incident.io integration configuration endpoints.
import json

from rich import print_json
import typer
from typing_extensions import Annotated

app = typer.Typer(help="IncidentIO commands",
        no_args_is_help=True)

# Shared base path for every incident.io integration endpoint below.
_BASE = "api/v1/incidentio"

@app.command()
def add(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
    api_key: str = typer.Option(..., "--api-key", "-api", help="API key"),
    host: str = typer.Option(None, "--host", "-h", help="Optional host name"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add a single configuration
    """

    client = ctx.obj["client"]

    if file_input:
        if alias or api_key or is_default or host:
            # Fixed error text: previously referred to "custom event definition".
            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration options")
        data = json.load(file_input)
    else:
        data = {
            "alias": alias,
            "apiKey": api_key,
            "host": host,
            "isDefault": is_default,
        }

    # remove any data elements that are None - can only be host
    data = {k: v for k, v in data.items() if v is not None}

    r = client.post(_BASE + "/configuration", data=data)
    print_json(data=r)

@app.command()
def add_multiple(
    ctx: typer.Context,
    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
):
    """
    Add multiple configurations
    """

    client = ctx.obj["client"]

    data = json.load(file_input)

    # Fixed endpoint: was PUT api/v1/aws/configurations (copy/paste from the AWS module).
    r = client.put(_BASE + "/configurations", data=data)
    print_json(data=r)

@app.command()
def delete(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Delete a configuration
    """

    client = ctx.obj["client"]

    r = client.delete(_BASE + "/configuration/" + alias)
    print_json(data=r)

@app.command()
def delete_all(
    ctx: typer.Context,
):
    """
    Delete all configurations
    """

    client = ctx.obj["client"]

    r = client.delete(_BASE + "/configurations")
    print_json(data=r)

@app.command()
def get(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Get a configuration
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/configuration/" + alias)
    print_json(data=r)

@app.command()
def get_all(
    ctx: typer.Context,
):
    """
    Get all configurations
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/configurations")
    print_json(data=r)

@app.command()
def get_default(
    ctx: typer.Context,
):
    """
    Get default configuration
    """

    client = ctx.obj["client"]

    r = client.get(_BASE + "/default-configuration")
    print_json(data=r)

@app.command()
def update(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
):
    """
    Update a configuration
    """

    client = ctx.obj["client"]

    data = {
        "alias": alias,
        "isDefault": is_default
    }

    r = client.put(_BASE + "/configuration/" + alias, data=data)
    print_json(data=r)

@app.command()
def validate(
    ctx: typer.Context,
    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
):
    """
    Validate a configuration
    """

    client = ctx.obj["client"]

    # Fixed path: was "configurations/validate" + alias (missing "/").
    r = client.post(_BASE + "/configuration/validate/" + alias)
    print_json(data=r)

@app.command()
def validate_all(
    ctx: typer.Context,
):
    """
    Validate all configurations
    """

    client = ctx.obj["client"]

    # Fixed path: was POST on the plain list endpoint.
    r = client.post(_BASE + "/configuration/validate")
    print_json(data=r)
typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/launchdarkly/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/launchdarkly/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/launchdarkly/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/launchdarkly/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/launchdarkly/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = 
client.put("api/v1/launchdarkly/configuration/" + alias, data=data)
+    print_json(data=r)
+
+@app.command()
+def validate(
+    ctx: typer.Context,
+    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"),
+):
+    """
+    Validate a configuration
+    """
+
+    client = ctx.obj["client"]
+
+    r = client.post("api/v1/launchdarkly/configuration/validate/" + alias)
+    print_json(data=r)
+
+@app.command()
+def validate_all(
+    ctx: typer.Context,
+):
+    """
+    Validate all configurations
+    """
+
+    client = ctx.obj["client"]
+
+    r = client.post("api/v1/launchdarkly/configuration/validate")
+    print_json(data=r)
diff --git a/cortexapps_cli/commands/integrations_commands/newrelic.py b/cortexapps_cli/commands/integrations_commands/newrelic.py
new file mode 100644
index 0000000..d588d08
--- /dev/null
+++ b/cortexapps_cli/commands/integrations_commands/newrelic.py
@@ -0,0 +1,171 @@
+import json
+from rich import print_json
+import typer
+from typing_extensions import Annotated
+
+app = typer.Typer(help="New Relic commands",
+    no_args_is_help=True)
+
+@app.command()
+def add(
+    ctx: typer.Context,
+    alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"),
+    api_key: str = typer.Option(..., "--api-key", "-api", help="API key"),
+    host: str = typer.Option(None, "--host", "-h", help="Optional host name"),
+    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
+    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
+):
+    """
+    Add a single configuration
+    """
+
+    client = ctx.obj["client"]
+
+    if file_input:
+        if alias or api_key or is_default or host:
+            raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes")
+        data = json.loads("".join([line for line in file_input]))
+    else:
+        data =
{ + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/newrelic/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/newrelic/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/newrelic/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/newrelic/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/newrelic/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/newrelic/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: 
typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/newrelic/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/newrelic/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/newrelic/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/pagerduty.py b/cortexapps_cli/commands/integrations_commands/pagerduty.py new file mode 100644 index 0000000..71b30c9 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/pagerduty.py @@ -0,0 +1,171 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="PagerDuty commands", + no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = 
None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or host: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/pagerduty/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/pagerduty/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/pagerduty/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/pagerduty/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + 
client = ctx.obj["client"] + + r = client.get("api/v1/pagerduty/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/pagerduty/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/pagerduty/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/pagerduty/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/pagerduty/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/prometheus.py b/cortexapps_cli/commands/integrations_commands/prometheus.py new file mode 100644 index 0000000..8ce6b6b --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/prometheus.py @@ -0,0 +1,171 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Prometheus commands", + no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + host: str = 
typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or host: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/prometheus/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/prometheus/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/prometheus/configurations") + print_json(data=r) + +@app.command() +def 
get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/prometheus/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/prometheus/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/prometheus/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/prometheus/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/prometheus/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/prometheus/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/sonarqube.py b/cortexapps_cli/commands/integrations_commands/sonarqube.py new file mode 100644 index 0000000..18a55d9 --- /dev/null +++ b/cortexapps_cli/commands/integrations_commands/sonarqube.py @@ -0,0 +1,171 @@ +import json +from rich 
import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="SonarQube commands", + no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + api_key: str = typer.Option(..., "--api-key", "-api", help="API key"), + host: str = typer.Option(None, "--host", "-h", help="Optional host name"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add a single configuration + """ + + client = ctx.obj["client"] + + if file_input: + if alias or api_key or is_default or host: + raise typer.BadParameter("When providing a custom event definition file, do not specify any other custom event attributes") + data = json.loads("".join([line for line in file_input])) + else: + data = { + "alias": alias, + "apiKey": api_key, + "host": host, + "isDefault": is_default, + } + + # remove any data elements that are None - can only be is_default + data = {k: v for k, v in data.items() if v is not None} + + r = client.post("api/v1/sonarqube/configuration", data=data) + print_json(data=r) + +@app.command() +def add_multiple( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None, +): + """ + Add multiple configurations + """ + + client = ctx.obj["client"] + + data = json.loads("".join([line for line in file_input])) + + r = client.put("api/v1/aws/configurations", data=data) + print_json(data=r) + +@app.command() +def delete( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Delete a 
configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/sonarqube/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/sonarqube/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/sonarqube/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/sonarqube/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/sonarqube/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/sonarqube/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/sonarqube/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all configurations + """ + + client = ctx.obj["client"] + + r = 
client.post("api/v1/sonarqube/configurations") + print_json(data=r) diff --git a/tests/test_integrations_coralogix.py b/tests/test_integrations_coralogix.py new file mode 100644 index 0000000..d84c9cd --- /dev/null +++ b/tests/test_integrations_coralogix.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. +def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_coralogix_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_coralogix_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration", json={}, status=200) + cli(["integrations", "coralogix", "add", "-a", "myAlias", "-r", "US1", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_coralogix_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configurations", json={}, status=200) + cli(["integrations", "coralogix", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_coralogix_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/test", status=200) + cli(["integrations", "coralogix", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_coralogix_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configurations", status=200) + cli(["integrations", "coralogix", "delete-all"]) + +@responses.activate +def test_integrations_coralogix_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/test", json={}, status=200) + cli(["integrations", "coralogix", "get", "-a", "test"]) + +@responses.activate +def test_integrations_coralogix_get_all(): + responses.add(responses.GET, 
os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configurations", json={}, status=200) + cli(["integrations", "coralogix", "get-all"]) + +@responses.activate +def test_integrations_coralogix_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/default-configuration", json={}, status=200) + cli(["integrations", "coralogix", "get-default"]) + +@responses.activate +def test_integrations_coralogix_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/test", json={}, status=200) + cli(["integrations", "coralogix", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_coralogix_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/validate/test", json={}, status=200) + cli(["integrations", "coralogix", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_coralogix_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/validate", json={}, status=200) + cli(["integrations", "coralogix", "validate-all"]) diff --git a/tests/test_integrations_datadog.py b/tests/test_integrations_datadog.py new file mode 100644 index 0000000..706c05f --- /dev/null +++ b/tests/test_integrations_datadog.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_datadog_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_datadog_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration", json={}, status=200) + cli(["integrations", "datadog", "add", "-a", "myAlias", "-r", "US1", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_datadog_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configurations", json={}, status=200) + cli(["integrations", "datadog", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_datadog_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration/test", status=200) + cli(["integrations", "datadog", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_datadog_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configurations", status=200) + cli(["integrations", "datadog", "delete-all"]) + +@responses.activate +def test_integrations_datadog_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration/test", json={}, status=200) + cli(["integrations", "datadog", "get", "-a", "test"]) + +@responses.activate +def test_integrations_datadog_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configurations", json={}, status=200) + cli(["integrations", "datadog", "get-all"]) + +@responses.activate +def test_integrations_datadog_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/default-configuration", json={}, status=200) + cli(["integrations", "datadog", "get-default"]) + +@responses.activate +def test_integrations_datadog_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + 
"/api/v1/datadog/configuration/test", json={}, status=200) + cli(["integrations", "datadog", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_datadog_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration/validate/test", json={}, status=200) + cli(["integrations", "datadog", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_datadog_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configuration/validate", json={}, status=200) + cli(["integrations", "datadog", "validate-all"]) diff --git a/tests/test_integrations_github.py b/tests/test_integrations_github.py new file mode 100644 index 0000000..951fe06 --- /dev/null +++ b/tests/test_integrations_github.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_github_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_github_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration", json={}, status=200) + cli(["integrations", "github", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_github_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", json={}, status=200) + cli(["integrations", "github", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_github_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration/test", status=200) + cli(["integrations", "github", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_github_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", status=200) + cli(["integrations", "github", "delete-all"]) + +@responses.activate +def test_integrations_github_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration/test", json={}, status=200) + cli(["integrations", "github", "get", "-a", "test"]) + +@responses.activate +def test_integrations_github_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", json={}, status=200) + cli(["integrations", "github", "get-all"]) + +@responses.activate +def test_integrations_github_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/default-configuration", json={}, status=200) + cli(["integrations", "github", "get-default"]) + +@responses.activate +def test_integrations_github_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + 
"/api/v1/github/configuration/test", json={}, status=200) + cli(["integrations", "github", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_github_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration/validate/test", json={}, status=200) + cli(["integrations", "github", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_github_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configuration/validate", json={}, status=200) + cli(["integrations", "github", "validate-all"]) diff --git a/tests/test_integrations_gitlab.py b/tests/test_integrations_gitlab.py new file mode 100644 index 0000000..a46f97e --- /dev/null +++ b/tests/test_integrations_gitlab.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_gitlab_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_gitlab_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration", json={}, status=200) + cli(["integrations", "gitlab", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_gitlab_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configurations", json={}, status=200) + cli(["integrations", "gitlab", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_gitlab_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration/test", status=200) + cli(["integrations", "gitlab", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_gitlab_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configurations", status=200) + cli(["integrations", "gitlab", "delete-all"]) + +@responses.activate +def test_integrations_gitlab_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration/test", json={}, status=200) + cli(["integrations", "gitlab", "get", "-a", "test"]) + +@responses.activate +def test_integrations_gitlab_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configurations", json={}, status=200) + cli(["integrations", "gitlab", "get-all"]) + +@responses.activate +def test_integrations_gitlab_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/default-configuration", json={}, status=200) + cli(["integrations", "gitlab", "get-default"]) + +@responses.activate +def test_integrations_gitlab_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + 
"/api/v1/gitlab/configuration/test", json={}, status=200) + cli(["integrations", "gitlab", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_gitlab_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration/validate/test", json={}, status=200) + cli(["integrations", "gitlab", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_gitlab_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configuration/validate", json={}, status=200) + cli(["integrations", "gitlab", "validate-all"]) diff --git a/tests/test_integrations_incidentio.py b/tests/test_integrations_incidentio.py new file mode 100644 index 0000000..6100dcc --- /dev/null +++ b/tests/test_integrations_incidentio.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_incidentio_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_incidentio_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration", json={}, status=200) + cli(["integrations", "incidentio", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_incidentio_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", json={}, status=200) + cli(["integrations", "incidentio", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_incidentio_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", status=200) + cli(["integrations", "incidentio", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_incidentio_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", status=200) + cli(["integrations", "incidentio", "delete-all"]) + +@responses.activate +def test_integrations_incidentio_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", json={}, status=200) + cli(["integrations", "incidentio", "get", "-a", "test"]) + +@responses.activate +def test_integrations_incidentio_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", json={}, status=200) + cli(["integrations", "incidentio", "get-all"]) + +@responses.activate +def test_integrations_incidentio_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/default-configuration", json={}, status=200) + cli(["integrations", "incidentio", "get-default"]) + +@responses.activate +def 
test_integrations_incidentio_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", json={}, status=200) + cli(["integrations", "incidentio", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_incidentio_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/validate/test", json={}, status=200) + cli(["integrations", "incidentio", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_incidentio_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/validate", json={}, status=200) + cli(["integrations", "incidentio", "validate-all"]) diff --git a/tests/test_integrations_launchdarkly.py b/tests/test_integrations_launchdarkly.py new file mode 100644 index 0000000..b5aa665 --- /dev/null +++ b/tests/test_integrations_launchdarkly.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_launchdarkly_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_launchdarkly_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration", json={}, status=200) + cli(["integrations", "launchdarkly", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_launchdarkly_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configurations", json={}, status=200) + cli(["integrations", "launchdarkly", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_launchdarkly_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/test", status=200) + cli(["integrations", "launchdarkly", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_launchdarkly_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configurations", status=200) + cli(["integrations", "launchdarkly", "delete-all"]) + +@responses.activate +def test_integrations_launchdarkly_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/test", json={}, status=200) + cli(["integrations", "launchdarkly", "get", "-a", "test"]) + +@responses.activate +def test_integrations_launchdarkly_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configurations", json={}, status=200) + cli(["integrations", "launchdarkly", "get-all"]) + +@responses.activate +def test_integrations_launchdarkly_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/default-configuration", json={}, status=200) + cli(["integrations", "launchdarkly", "get-default"]) + 
+@responses.activate +def test_integrations_launchdarkly_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/test", json={}, status=200) + cli(["integrations", "launchdarkly", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_launchdarkly_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/validate/test", json={}, status=200) + cli(["integrations", "launchdarkly", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_launchdarkly_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/validate", json={}, status=200) + cli(["integrations", "launchdarkly", "validate-all"]) diff --git a/tests/test_integrations_newrelic.py b/tests/test_integrations_newrelic.py new file mode 100644 index 0000000..71846b4 --- /dev/null +++ b/tests/test_integrations_newrelic.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_newrelic_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_newrelic_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration", json={}, status=200) + cli(["integrations", "newrelic", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_newrelic_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", json={}, status=200) + cli(["integrations", "newrelic", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_newrelic_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", status=200) + cli(["integrations", "newrelic", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_newrelic_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", status=200) + cli(["integrations", "newrelic", "delete-all"]) + +@responses.activate +def test_integrations_newrelic_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", json={}, status=200) + cli(["integrations", "newrelic", "get", "-a", "test"]) + +@responses.activate +def test_integrations_newrelic_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", json={}, status=200) + cli(["integrations", "newrelic", "get-all"]) + +@responses.activate +def test_integrations_newrelic_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/default-configuration", json={}, status=200) + cli(["integrations", "newrelic", "get-default"]) + +@responses.activate +def test_integrations_newrelic_update(): + responses.add(responses.PUT, 
os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", json={}, status=200) + cli(["integrations", "newrelic", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_newrelic_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/validate/test", json={}, status=200) + cli(["integrations", "newrelic", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_newrelic_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/validate", json={}, status=200) + cli(["integrations", "newrelic", "validate-all"]) diff --git a/tests/test_integrations_pagerduty.py b/tests/test_integrations_pagerduty.py new file mode 100644 index 0000000..96752e4 --- /dev/null +++ b/tests/test_integrations_pagerduty.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_pagerduty_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_pagerduty_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration", json={}, status=200) + cli(["integrations", "pagerduty", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_pagerduty_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configurations", json={}, status=200) + cli(["integrations", "pagerduty", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_pagerduty_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/test", status=200) + cli(["integrations", "pagerduty", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_pagerduty_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configurations", status=200) + cli(["integrations", "pagerduty", "delete-all"]) + +@responses.activate +def test_integrations_pagerduty_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/test", json={}, status=200) + cli(["integrations", "pagerduty", "get", "-a", "test"]) + +@responses.activate +def test_integrations_pagerduty_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configurations", json={}, status=200) + cli(["integrations", "pagerduty", "get-all"]) + +@responses.activate +def test_integrations_pagerduty_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/default-configuration", json={}, status=200) + cli(["integrations", "pagerduty", "get-default"]) + +@responses.activate +def test_integrations_pagerduty_update(): + 
responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/test", json={}, status=200) + cli(["integrations", "pagerduty", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_pagerduty_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/validate/test", json={}, status=200) + cli(["integrations", "pagerduty", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_pagerduty_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configuration/validate", json={}, status=200) + cli(["integrations", "pagerduty", "validate-all"]) diff --git a/tests/test_integrations_prometheus.py b/tests/test_integrations_prometheus.py new file mode 100644 index 0000000..3bb4c66 --- /dev/null +++ b/tests/test_integrations_prometheus.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_prometheus_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_prometheus_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration", json={}, status=200) + cli(["integrations", "prometheus", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_prometheus_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configurations", json={}, status=200) + cli(["integrations", "prometheus", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_prometheus_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/test", status=200) + cli(["integrations", "prometheus", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_prometheus_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configurations", status=200) + cli(["integrations", "prometheus", "delete-all"]) + +@responses.activate +def test_integrations_prometheus_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/test", json={}, status=200) + cli(["integrations", "prometheus", "get", "-a", "test"]) + +@responses.activate +def test_integrations_prometheus_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configurations", json={}, status=200) + cli(["integrations", "prometheus", "get-all"]) + +@responses.activate +def test_integrations_prometheus_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/default-configuration", json={}, status=200) + cli(["integrations", "prometheus", "get-default"]) + +@responses.activate +def 
test_integrations_prometheus_update(): + responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/test", json={}, status=200) + cli(["integrations", "prometheus", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_prometheus_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/validate/test", json={}, status=200) + cli(["integrations", "prometheus", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_prometheus_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configuration/validate", json={}, status=200) + cli(["integrations", "prometheus", "validate-all"]) diff --git a/tests/test_integrations_sonarqube.py b/tests/test_integrations_sonarqube.py new file mode 100644 index 0000000..0918f0e --- /dev/null +++ b/tests/test_integrations_sonarqube.py @@ -0,0 +1,59 @@ +from tests.helpers.utils import * + +# Since responses are all mocked and no data validation is done by the CLI -- +# we let the API handle validation -- we don't need valid input files. 
+def _dummy_file(tmp_path): + f = tmp_path / "test_integrations_sonarqube_add.json" + f.write_text("foobar") + return f + +@responses.activate +def test_integrations_sonarqube_add(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration", json={}, status=200) + cli(["integrations", "sonarqube", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + +@responses.activate +def test_integrations_sonarqube_add_multiple(tmp_path): + f = _dummy_file(tmp_path) + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configurations", json={}, status=200) + cli(["integrations", "sonarqube", "add-multiple", "-f", str(f)]) + +@responses.activate +def test_integrations_sonarqube_delete(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/test", status=200) + cli(["integrations", "sonarqube", "delete", "-a", "test"]) + +@responses.activate +def test_integrations_sonarqube_delete_all(): + responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configurations", status=200) + cli(["integrations", "sonarqube", "delete-all"]) + +@responses.activate +def test_integrations_sonarqube_get(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/test", json={}, status=200) + cli(["integrations", "sonarqube", "get", "-a", "test"]) + +@responses.activate +def test_integrations_sonarqube_get_all(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configurations", json={}, status=200) + cli(["integrations", "sonarqube", "get-all"]) + +@responses.activate +def test_integrations_sonarqube_get_default(): + responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/default-configuration", json={}, status=200) + cli(["integrations", "sonarqube", "get-default"]) + +@responses.activate +def test_integrations_sonarqube_update(): + 
responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/test", json={}, status=200) + cli(["integrations", "sonarqube", "update", "-a", "test", "-i"]) + +@responses.activate +def test_integrations_sonarqube_validate(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/validate/test", json={}, status=200) + cli(["integrations", "sonarqube", "validate", "-a", "test"]) + +@responses.activate +def test_integrations_sonarqube_validate_all(): + responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/validate", json={}, status=200) + cli(["integrations", "sonarqube", "validate-all"]) From 2e57ab7adc447680d68cb4d1ce0f31cf60404e91 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 13 Dec 2024 15:31:07 -0800 Subject: [PATCH 43/56] Catalog updates --- cortexapps_cli/cli.py | 1 + cortexapps_cli/commands/catalog.py | 77 ++++++++++++++++++++++++++ cortexapps_cli/commands/custom_data.py | 39 ++++++++++++- cortexapps_cli/cortex_client.py | 6 ++ 4 files changed, 122 insertions(+), 1 deletion(-) diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 922ae03..a4237a3 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -34,6 +34,7 @@ app = typer.Typer( no_args_is_help=True, + rich_markup_mode="rich", context_settings={"help_option_names": ["-h", "--help"]} ) diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index a58b36c..a678fc8 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -165,3 +165,80 @@ def details( data = r if output_format == 'json' else [r] print_output_with_context(ctx, data) + +@app.command() +def archive( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Archive an entity + """ + client = ctx.obj["client"] + + r = 
client.put("api/v1/catalog/" + tag + "/archive") + print_output_with_context(ctx, r) + +@app.command() +def unarchive( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Unarchive an entity + """ + client = ctx.obj["client"] + + r = client.put("api/v1/catalog/" + tag + "/unarchive") + print_output_with_context(ctx, r) + +@app.command() +def delete( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Delete an entity + """ + client = ctx.obj["client"] + + client.delete("api/v1/catalog/" + tag) + +@app.command() +def delete_by_type( + ctx: typer.Context, + types: CatalogCommandOptions.types = None, +): + """ + Dangerous operation that will delete all entities that are of the given type + """ + client = ctx.obj["client"] + + #TODO: check if types is a regex of form: ([-A-Za-z]+,)+ + + params = { + "types": types + } + + client.delete("api/v1/catalog", params=params) + + +@app.command() +def descriptor( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + yaml: bool = typer.Option(False, "--yaml", "-y", help="When true, returns the YAML representation of the descriptor."), +): + """ + Retrieve entity descriptor + """ + client = ctx.obj["client"] + + params = { + "yaml": yaml + } + + print("params = " + str(params)) + + r = client.get("api/v1/catalog/" + tag + "/openapi", params=params) + print_output_with_context(ctx, r) diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py index 40a30fc..9f6ebfd 100644 --- a/cortexapps_cli/commands/custom_data.py +++ b/cortexapps_cli/commands/custom_data.py @@ -5,6 +5,7 @@ from rich import print_json app = typer.Typer(help="Custom data commands") + # Need a helper function 
to parse custom_data. +# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) +# Results in: @@ -28,10 +29,46 @@ def add( key: str = typer.Option(None, "--key", "-k", help="The custom data key to create (only if file input not provided)."), value: str = typer.Option(None, "--value", "-v", help="The value of the custom data key (only if file input not provided)."), description: str = typer.Option(None, "--description", "-d", help="The description of the custom data key (only if file input not provided)."), - tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") ): """ Add custom data for entity + + Format of JSON-formatted configuration file: + + { + "description": "string", + "key": "my-key", + "value": { + "nested": { + "objects": "are ok" + } + } + } + + Examples: + --------- + Single value: + { + "description": "A field to store CI/CD tool", + "key": "ci-cd-tool", + "value": "Jenkins" + } + + + Nested values: + { + "description": "Custom field to store build metrics", + "key": "build-metrics", + "value": { + "2023-08-01": { + "success-rate": "50" + }, + "2023-08-02": { + "success-rate": "67" + } + } + } """ client = ctx.obj["client"] diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index 435db79..362233c 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -3,6 +3,8 @@ import typer from rich import print from rich import print_json +from rich.markdown import Markdown +from rich.console import Console from cortexapps_cli.utils import guess_data_key @@ -24,6 +26,10 @@ def request(self, method, endpoint, params={}, headers={}, data=None, raw_body=F if content_type == 'application/json' and isinstance(data, dict): req_data = json.dumps(data) + +print("JEFF, requests, params = " + 
str(params)) + #params = json.dumps(params) + #print("JEFF, requests, now params = " + str(params)) + response = requests.request(method, url, params=params, headers=req_headers, data=req_data) if not response.ok: From d154a2583cf5de93edbc712ea7d27c02ab3960ce Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 20 Dec 2024 13:58:40 -0800 Subject: [PATCH 44/56] catalog and API key commands --- cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/api_keys.py | 53 +++++++++ cortexapps_cli/commands/catalog.py | 104 +++++++++++++++++- cortexapps_cli/cortex_client.py | 4 - tests/helpers/utils.py | 1 + tests/test_audit_logs.py | 1 + tests/test_audit_logs_dates.py | 1 + tests/test_audit_logs_end_date.py | 1 + tests/test_audit_logs_page.py | 1 + tests/test_audit_logs_size.py | 1 + tests/test_audit_logs_start_date.py | 1 + tests/test_catalog_archive_entity.py | 8 ++ tests/test_catalog_create_entity.py | 7 ++ tests/test_catalog_create_entity_viewer.py | 8 ++ tests/test_catalog_delete_entity.py | 13 +++ tests/test_catalog_get_entity_details.py | 5 + ...st_catalog_get_entity_details_hierarchy.py | 6 + tests/test_catalog_include_links.py | 8 ++ tests/test_catalog_include_metadata.py | 8 ++ tests/test_catalog_include_nested_fields.py | 8 ++ tests/test_catalog_list_by_github_repo.py | 5 + tests/test_catalog_list_by_group_multiple.py | 5 + tests/test_catalog_list_by_group_single.py | 5 + tests/test_catalog_list_by_owners_multiple.py | 5 + tests/test_catalog_list_by_owners_single.py | 5 + tests/test_catalog_list_by_types.py | 5 + tests/test_catalog_list_entity_descriptors.py | 6 + ...st_catalog_list_entity_descriptors_page.py | 5 + ...talog_list_entity_descriptors_page_size.py | 5 + ...st_catalog_list_entity_descriptors_yaml.py | 6 + tests/test_catalog_list_include_archived.py | 8 ++ tests/test_catalog_list_include_owners.py | 5 + tests/test_catalog_list_page.py | 5 + tests/test_catalog_list_page_size.py | 5 + ...test_catalog_retrieve_entity_descriptor.py | 6 + 
...catalog_retrieve_entity_descriptor_yaml.py | 5 + tests/test_catalog_retrieve_entity_details.py | 5 + ...etrieve_entity_details_hierarchy_fields.py | 5 + ...atalog_retrieve_entity_scorecard_scores.py | 7 ++ tests/test_catalog_unarchive_entity.py | 11 ++ tests/test_integrations_sonarqube.py | 25 ++++- 41 files changed, 371 insertions(+), 9 deletions(-) create mode 100644 cortexapps_cli/commands/api_keys.py create mode 100644 tests/test_catalog_archive_entity.py create mode 100644 tests/test_catalog_create_entity.py create mode 100644 tests/test_catalog_create_entity_viewer.py create mode 100644 tests/test_catalog_delete_entity.py create mode 100644 tests/test_catalog_get_entity_details.py create mode 100644 tests/test_catalog_get_entity_details_hierarchy.py create mode 100644 tests/test_catalog_include_links.py create mode 100644 tests/test_catalog_include_metadata.py create mode 100644 tests/test_catalog_include_nested_fields.py create mode 100644 tests/test_catalog_list_by_github_repo.py create mode 100644 tests/test_catalog_list_by_group_multiple.py create mode 100644 tests/test_catalog_list_by_group_single.py create mode 100644 tests/test_catalog_list_by_owners_multiple.py create mode 100644 tests/test_catalog_list_by_owners_single.py create mode 100644 tests/test_catalog_list_by_types.py create mode 100644 tests/test_catalog_list_entity_descriptors.py create mode 100644 tests/test_catalog_list_entity_descriptors_page.py create mode 100644 tests/test_catalog_list_entity_descriptors_page_size.py create mode 100644 tests/test_catalog_list_entity_descriptors_yaml.py create mode 100644 tests/test_catalog_list_include_archived.py create mode 100644 tests/test_catalog_list_include_owners.py create mode 100644 tests/test_catalog_list_page.py create mode 100644 tests/test_catalog_list_page_size.py create mode 100644 tests/test_catalog_retrieve_entity_descriptor.py create mode 100644 tests/test_catalog_retrieve_entity_descriptor_yaml.py create mode 100644 
tests/test_catalog_retrieve_entity_details.py create mode 100644 tests/test_catalog_retrieve_entity_details_hierarchy_fields.py create mode 100644 tests/test_catalog_retrieve_entity_scorecard_scores.py create mode 100644 tests/test_catalog_unarchive_entity.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index a4237a3..545c66d 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -9,6 +9,7 @@ from cortexapps_cli.cortex_client import CortexClient +import cortexapps_cli.commands.api_keys as api_keys import cortexapps_cli.commands.audit_logs as audit_logs import cortexapps_cli.commands.catalog as catalog import cortexapps_cli.commands.custom_data as custom_data @@ -39,6 +40,7 @@ ) # add subcommands +app.add_typer(api_keys.app, name="api-keys") app.add_typer(audit_logs.app, name="audit-logs") app.add_typer(catalog.app, name="catalog") app.add_typer(custom_data.app, name="custom-data") diff --git a/cortexapps_cli/commands/api_keys.py b/cortexapps_cli/commands/api_keys.py new file mode 100644 index 0000000..36e8b37 --- /dev/null +++ b/cortexapps_cli/commands/api_keys.py @@ -0,0 +1,53 @@ +from datetime import datetime +import typer +import json +from enum import Enum +from typing_extensions import Annotated +from cortexapps_cli.utils import print_output_with_context + +app = typer.Typer(help="API Keys commands") + +class DefaultRole(str, Enum): + ADMIN = "ADMIN" + USER = "USER" + READ_ONLY = "READ_ONLY" + +@app.command() +def create( + ctx: typer.Context, + description: str | None = typer.Option(None, "--description", "-d", help="Description of the API key"), + name: str | None = typer.Option(None, "--name", "-n", help="Name of the API key"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing content; can be passed as stdin with -, example: -f-")] = None, + default_roles: str | None = typer.Option(None, "--default-roles", "-dr", help="Comma-separated list of default roles (only if file input not 
provided)."), + custom_roles: str | None = typer.Option(None, "--custom-roles", "-cr", help="Comma-separated list of custom roles (only if file input not provided)."), + expiration_date: datetime | None = typer.Option(None, "--expiration-date", "-e", help="Expiration date of the API key", formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), +): + """ + Create new API key. The API key used to make the request must have the Create API keys permission + """ + client = ctx.obj["client"] + + if file_input: + if name or description or expiration_date or default_roles or custom_roles: + raise typer.BadParameter("When providing an API definition file, do not specify any other attributes") + data = json.loads("".join([line for line in file_input])) + + else: + data = { + "roles": [], + "name": name + } + + for role in (default_roles.split(",") if default_roles else []): + data["roles"].append({"role": role, "type": "DEFAULT"}) + for role in (custom_roles.split(",") if custom_roles else []): + data["roles"].append({"tag": role, "type": "CUSTOM"}) + + if description: + data["description"] = description + if expiration_date: + data["expirationDate"] = expiration_date.strftime('%Y-%m-%dT%H:%M:%S.000Z') + + r = client.post("api/v1/auth/key", data=data) + #print(r) + print_output_with_context(ctx, r) diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index a678fc8..730be77 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -44,6 +44,14 @@ class CatalogCommandOptions: Optional[bool], typer.Option("--include-metadata", "-m", help="Include custom data for each entity in the response", show_default=False) ] + dry_run = Annotated[ + Optional[bool], + typer.Option("--dry-run", "-dry", help="When true, only validates the descriptor contents and returns any errors or warnings", show_default=False) + ] + append_arrays = Annotated[ + Optional[bool], + typer.Option("--append-arrays", "-aa", help="Default merge behavior is to replace arrays, set this to true to 
append arrays instead. For simple types, duplicate values will be removed from the merged array", show_default=False) + ] git_repositories = Annotated[ Optional[str], typer.Option("--git-repositories", "-r", help="Supports only GitHub repositories in the org/repo format", show_default=False) @@ -235,10 +243,100 @@ def descriptor( client = ctx.obj["client"] params = { - "yaml": yaml + "yaml": str(yaml).lower() } - print("params = " + str(params)) - r = client.get("api/v1/catalog/" + tag + "/openapi", params=params) + if yaml: + print(r) + else: + print_output_with_context(ctx, r) + +@app.command() +def create( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing YAML content of entity; can be passed as stdin with -, example: -f-")] = None, + dry_run: CatalogCommandOptions.dry_run = False, +): + """ + Create entity + """ + client = ctx.obj["client"] + + params = { + "dryRun": dry_run + } + + r = client.post("api/v1/open-api", data=file_input.read(), params=params, content_type="application/openapi;charset=UTF-8") + print_output_with_context(ctx, r) + +@app.command() +def patch( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help=" File containing YAML content of entity; can be passed as stdin with -, example: -f-")] = None, + delete_marker_value = typer.Option("__delete__", "--delete-marker-value", "-dmv", help="Delete keys with this value from the merged yaml, defaults to __delete__, if any values match this, they will not be included in merged YAML. For example my_value: __delete__ will remove my_value from the merged YAML."), + dry_run: CatalogCommandOptions.dry_run = False, + append_arrays: CatalogCommandOptions.append_arrays = False, +): + """ + Creates or updates an entity. 
If the YAML refers to an entity that already exists (as referenced by the x-cortex-tag), this API will merge the specified changes into the existing entity + """ + client = ctx.obj["client"] + + params = { + "dryRun":dry_run, + "appendArrays": append_arrays, + "deleteMarkerValue": delete_marker_value + } + + r = client.patch("api/v1/open-api", data=file_input.read(), params=params, content_type="application/openapi;charset=UTF-8") + print_output_with_context(ctx, r) + +@app.command() +def list_descriptors( + ctx: typer.Context, + yaml: bool = typer.Option(False, "--yaml", "-y", help="When true, returns the YAML representation of the descriptor."), + types: CatalogCommandOptions.types = None, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, +): + """ + List entity descriptors + """ + client = ctx.obj["client"] + + params = { + "yaml": yaml, + "types": types, + "pageSize": page_size, + "page": page + } + + r = client.get("api/v1/catalog/descriptors", params=params) + print_output_with_context(ctx, r) + +@app.command() +def gitops_log( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Retrieve most recent GitOps log for entity + """ + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag + "/gitops-logs") + print_output_with_context(ctx, r) + +@app.command() +def scorecard_scores( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), +): + """ + Retrieve entity Scorecard scores + """ + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/" + tag + "/scorecards") print_output_with_context(ctx, r) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index 362233c..0442c4f 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -26,10 +26,6 
@@ def request(self, method, endpoint, params={}, headers={}, data=None, raw_body=F if content_type == 'application/json' and isinstance(data, dict): req_data = json.dumps(data) - #print("JEFF, requests, params = " + str(params)) - #params = json.dumps(params) - #print("JEFF, requests, now params = " + str(params)) - response = requests.request(method, url, params=params, headers=req_headers, data=req_data) if not response.ok: diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py index 5e4453e..981d309 100644 --- a/tests/helpers/utils.py +++ b/tests/helpers/utils.py @@ -9,6 +9,7 @@ from typer.testing import CliRunner from unittest import mock import responses +import yaml runner = CliRunner() diff --git a/tests/test_audit_logs.py b/tests/test_audit_logs.py index 5401b99..4ea6c7c 100644 --- a/tests/test_audit_logs.py +++ b/tests/test_audit_logs.py @@ -1,5 +1,6 @@ from tests.helpers.utils import * +@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): result = cli(["audit-logs", "get"]) assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_dates.py b/tests/test_audit_logs_dates.py index b9d40f7..8adcfe3 100644 --- a/tests/test_audit_logs_dates.py +++ b/tests/test_audit_logs_dates.py @@ -1,5 +1,6 @@ from tests.helpers.utils import * +@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): end_date = today() start_date = yesterday() diff --git a/tests/test_audit_logs_end_date.py b/tests/test_audit_logs_end_date.py index 2667e0e..e4b2eba 100644 --- a/tests/test_audit_logs_end_date.py +++ b/tests/test_audit_logs_end_date.py @@ -1,5 +1,6 @@ from tests.helpers.utils import * +@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): end_date = today() result = cli(["audit-logs", "get", "-e", end_date]) diff --git a/tests/test_audit_logs_page.py b/tests/test_audit_logs_page.py index 2e17e80..5fbc317 100644 --- a/tests/test_audit_logs_page.py +++ b/tests/test_audit_logs_page.py @@ -1,5 
+1,6 @@ from tests.helpers.utils import * +@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): result = cli(["audit-logs", "get", "-p", "0"]) assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_size.py b/tests/test_audit_logs_size.py index a98056b..c6f0374 100644 --- a/tests/test_audit_logs_size.py +++ b/tests/test_audit_logs_size.py @@ -1,5 +1,6 @@ from tests.helpers.utils import * +@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): result = cli(["audit-logs", "get", "-p", "0", "-z", "1"]) assert (len(result['logs']) == 1) diff --git a/tests/test_audit_logs_start_date.py b/tests/test_audit_logs_start_date.py index bd3884b..1434a28 100644 --- a/tests/test_audit_logs_start_date.py +++ b/tests/test_audit_logs_start_date.py @@ -1,5 +1,6 @@ from tests.helpers.utils import * +@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): start_date = yesterday() result = cli(["audit-logs", "get", "-s", start_date]) diff --git a/tests/test_catalog_archive_entity.py b/tests/test_catalog_archive_entity.py new file mode 100644 index 0000000..8f81f9b --- /dev/null +++ b/tests/test_catalog_archive_entity.py @@ -0,0 +1,8 @@ +from tests.helpers.utils import * + +def test(): + cli(["catalog", "create", "-f", "data/run-time/archive-entity.yaml"]) + cli(["catalog", "archive", "-t", "archive-entity"]) + + response = cli(["catalog", "details", "-t", "archive-entity"]) + assert response['isArchived'] == True, "isArchived attribute should be true" diff --git a/tests/test_catalog_create_entity.py b/tests/test_catalog_create_entity.py new file mode 100644 index 0000000..c75b180 --- /dev/null +++ b/tests/test_catalog_create_entity.py @@ -0,0 +1,7 @@ +from tests.helpers.utils import * + +def test(): + cli(["catalog", "create", "-f", "data/run-time/create-entity.yaml"]) + + response = cli(["catalog", "descriptor", "-t", "create-entity"]) + assert response['info']['x-cortex-tag'] == "create-entity" diff 
--git a/tests/test_catalog_create_entity_viewer.py b/tests/test_catalog_create_entity_viewer.py new file mode 100644 index 0000000..db6db01 --- /dev/null +++ b/tests/test_catalog_create_entity_viewer.py @@ -0,0 +1,8 @@ +from tests.helpers.utils import * + +# Using a key with viewer role should be Forbidden. +@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) +def test(capsys): + response = cli(["catalog", "create", "-f", "data/run-time/create-entity.yaml"], ReturnType.RAW) + + assert "HTTP Error 403:" in response.stdout, "command fails with 403 error" diff --git a/tests/test_catalog_delete_entity.py b/tests/test_catalog_delete_entity.py new file mode 100644 index 0000000..3f51d2c --- /dev/null +++ b/tests/test_catalog_delete_entity.py @@ -0,0 +1,13 @@ +from tests.helpers.utils import * + +def test(): + cli(["catalog", "create", "-f", "data/run-time/delete-entity.yaml"]) + response = cli(["catalog", "details", "-t", "delete-entity"]) + assert response['tag'] == 'delete-entity', "Should find newly created entity" + + cli(["catalog", "delete", "-t", "delete-entity"]) + + # Since entity is deleted, cli command should exit with a Not Found, 404 error. 
+ response = cli(["catalog", "details", "-t", "delete-entity"], ReturnType.RAW) + + assert "HTTP Error 404:" in response.stdout, "command fails with 403 error" diff --git a/tests/test_catalog_get_entity_details.py b/tests/test_catalog_get_entity_details.py new file mode 100644 index 0000000..c1563ab --- /dev/null +++ b/tests/test_catalog_get_entity_details.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli( ["catalog", "details", "-t", "backend-worker"]) + assert response['tag'] == 'backend-worker', "Entity details should be returned" diff --git a/tests/test_catalog_get_entity_details_hierarchy.py b/tests/test_catalog_get_entity_details_hierarchy.py new file mode 100644 index 0000000..4006392 --- /dev/null +++ b/tests/test_catalog_get_entity_details_hierarchy.py @@ -0,0 +1,6 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "details", "-i", "groups", "-t", "sso-integration"]) + assert response['hierarchy']['parents'][0]['groups'][0] == 'public-api-test', "Entity groups should be in response" + assert response['hierarchy']['parents'][0]['parents'][0]['groups'][0] == 'public-api-test', "Parent groups should be in response" diff --git a/tests/test_catalog_include_links.py b/tests/test_catalog_include_links.py new file mode 100644 index 0000000..2de92da --- /dev/null +++ b/tests/test_catalog_include_links.py @@ -0,0 +1,8 @@ +from tests.helpers.utils import * + +# Too brittle if we assume only one entity has group 'include-links-test'? 
+def test(): + response = cli(["catalog", "list", "-g", "include-links-test"]) + assert (len(response['entities'][0]['links']) == 0) + response = cli(["catalog", "list", "-g", "include-links-test", "-l"]) + assert (len(response['entities'][0]['links']) > 0) diff --git a/tests/test_catalog_include_metadata.py b/tests/test_catalog_include_metadata.py new file mode 100644 index 0000000..aaab24c --- /dev/null +++ b/tests/test_catalog_include_metadata.py @@ -0,0 +1,8 @@ +from tests.helpers.utils import * + +# Too brittle if we assume only one entity has group 'include-metadata-test'? +def test(): + response = cli(["catalog", "list", "-g", "include-metadata-test"]) + assert (len(response['entities'][0]['metadata']) == 0) + response = cli(["catalog", "list", "-g", "include-metadata-test", "-m"]) + assert (len(response['entities'][0]['metadata']) > 0) diff --git a/tests/test_catalog_include_nested_fields.py b/tests/test_catalog_include_nested_fields.py new file mode 100644 index 0000000..7146cd2 --- /dev/null +++ b/tests/test_catalog_include_nested_fields.py @@ -0,0 +1,8 @@ +from tests.helpers.utils import * + +#@pytest.mark.skipif(allow_team_entities_in_catalog_api() == False, reason="Account flag ALLOW_TEAM_ENTITIES_IN_CATALOG_API is not set") +def test(): + response = cli(["catalog", "list", "-g", "public-api-test", "-io", "-in", "team:members"]) + list = [entity for entity in response['entities'] if entity['tag'] == "search-experience"] + assert not list == None, "found search-experience entity in response" + assert len(list[0]['members']) > 0, "response has non-empty array of members" diff --git a/tests/test_catalog_list_by_github_repo.py b/tests/test_catalog_list_by_github_repo.py new file mode 100644 index 0000000..2ac9f2b --- /dev/null +++ b/tests/test_catalog_list_by_github_repo.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli( ["catalog", "list", "-r", "my-org/my-repo"]) + assert (response['total'] == 1) diff --git 
a/tests/test_catalog_list_by_group_multiple.py b/tests/test_catalog_list_by_group_multiple.py new file mode 100644 index 0000000..de32aaf --- /dev/null +++ b/tests/test_catalog_list_by_group_multiple.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list", "-g", "public-api-test-group-1,public-api-test-group-2"]) + assert (response['total'] == 2) diff --git a/tests/test_catalog_list_by_group_single.py b/tests/test_catalog_list_by_group_single.py new file mode 100644 index 0000000..9b58dc1 --- /dev/null +++ b/tests/test_catalog_list_by_group_single.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list", "-g", "public-api-test-group-1"]) + assert (response['total'] == 1) diff --git a/tests/test_catalog_list_by_owners_multiple.py b/tests/test_catalog_list_by_owners_multiple.py new file mode 100644 index 0000000..c1cf512 --- /dev/null +++ b/tests/test_catalog_list_by_owners_multiple.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list", "-o", "payments-team,search-experience"]) + assert (response['total'] == 2) diff --git a/tests/test_catalog_list_by_owners_single.py b/tests/test_catalog_list_by_owners_single.py new file mode 100644 index 0000000..e0e0a30 --- /dev/null +++ b/tests/test_catalog_list_by_owners_single.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list", "-o", "payments-team"]) + assert (response['total'] == 1) diff --git a/tests/test_catalog_list_by_types.py b/tests/test_catalog_list_by_types.py new file mode 100644 index 0000000..5053dfc --- /dev/null +++ b/tests/test_catalog_list_by_types.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list", "-g", "public-api-test", "-t", "component"]) + assert response['total'] > 0, "Should find at least 1 entity of type 'component'" diff --git 
a/tests/test_catalog_list_entity_descriptors.py b/tests/test_catalog_list_entity_descriptors.py new file mode 100644 index 0000000..1cd5fcf --- /dev/null +++ b/tests/test_catalog_list_entity_descriptors.py @@ -0,0 +1,6 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list-descriptors", "-t", "api"]) + list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "api-australia"] + assert list[0]['info']['x-cortex-groups'][0] == "public-api-test" diff --git a/tests/test_catalog_list_entity_descriptors_page.py b/tests/test_catalog_list_entity_descriptors_page.py new file mode 100644 index 0000000..1b505c6 --- /dev/null +++ b/tests/test_catalog_list_entity_descriptors_page.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list-descriptors", "-t", "component", "-p", "0", "-z", "1"]) + assert response['descriptors'][0]['info']['x-cortex-tag'] == "backend-worker" diff --git a/tests/test_catalog_list_entity_descriptors_page_size.py b/tests/test_catalog_list_entity_descriptors_page_size.py new file mode 100644 index 0000000..874a0ab --- /dev/null +++ b/tests/test_catalog_list_entity_descriptors_page_size.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list-descriptors", "-t", "component", "-z", "1"]) + assert (len(response['descriptors']) == 1) diff --git a/tests/test_catalog_list_entity_descriptors_yaml.py b/tests/test_catalog_list_entity_descriptors_yaml.py new file mode 100644 index 0000000..81050e8 --- /dev/null +++ b/tests/test_catalog_list_entity_descriptors_yaml.py @@ -0,0 +1,6 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list-descriptors", "-y", "-t", "component"]) + list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "backend-worker"] + assert list[0]['info']['x-cortex-custom-metadata']['cicd'] == 
"circle-ci" diff --git a/tests/test_catalog_list_include_archived.py b/tests/test_catalog_list_include_archived.py new file mode 100644 index 0000000..0577b87 --- /dev/null +++ b/tests/test_catalog_list_include_archived.py @@ -0,0 +1,8 @@ +from tests.helpers.utils import * + +def test(capsys): + response = cli(["catalog", "list", "-g", "public-api-test", "-z", "500"]) + assert not any(entity['tag'] == 'robot-item-sorter' for entity in response['entities']), "Should not find archived entity" + + response = cli(["catalog", "list", "-g", "public-api-test", "-a", "-z", "500"]) + assert any(entity['tag'] == 'robot-item-sorter' for entity in response['entities']), "Should find archived entity" diff --git a/tests/test_catalog_list_include_owners.py b/tests/test_catalog_list_include_owners.py new file mode 100644 index 0000000..ccddb25 --- /dev/null +++ b/tests/test_catalog_list_include_owners.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(capsys): + response = cli(["catalog", "list", "-g", "public-api-test", "-io"]) + assert not(response['entities'][0]['owners']['teams'] is None), "Teams array should be returned in result" diff --git a/tests/test_catalog_list_page.py b/tests/test_catalog_list_page.py new file mode 100644 index 0000000..61bcc69 --- /dev/null +++ b/tests/test_catalog_list_page.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(capsys): + response = cli(["catalog", "list", "-g", "public-api-test", "-p", "0"]) + assert (len(response['entities']) > 0) diff --git a/tests/test_catalog_list_page_size.py b/tests/test_catalog_list_page_size.py new file mode 100644 index 0000000..afab118 --- /dev/null +++ b/tests/test_catalog_list_page_size.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "list", "-g", "public-api-test", "-p", "0", "-z", "1"]) + assert (len(response['entities']) == 1) diff --git a/tests/test_catalog_retrieve_entity_descriptor.py 
b/tests/test_catalog_retrieve_entity_descriptor.py new file mode 100644 index 0000000..093e7d4 --- /dev/null +++ b/tests/test_catalog_retrieve_entity_descriptor.py @@ -0,0 +1,6 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "descriptor", "-t", "backend-worker"]) + print(response) + assert response['info']['x-cortex-tag'] == "backend-worker" diff --git a/tests/test_catalog_retrieve_entity_descriptor_yaml.py b/tests/test_catalog_retrieve_entity_descriptor_yaml.py new file mode 100644 index 0000000..805829e --- /dev/null +++ b/tests/test_catalog_retrieve_entity_descriptor_yaml.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "descriptor", "-y", "-t", "backend-worker"], ReturnType.STDOUT) + assert yaml.safe_load(response)['info']['x-cortex-tag'] == "backend-worker" diff --git a/tests/test_catalog_retrieve_entity_details.py b/tests/test_catalog_retrieve_entity_details.py new file mode 100644 index 0000000..03d5460 --- /dev/null +++ b/tests/test_catalog_retrieve_entity_details.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "details", "-t", "backend-worker"]) + assert response['tag'] == "backend-worker" diff --git a/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py b/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py new file mode 100644 index 0000000..ccad405 --- /dev/null +++ b/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py @@ -0,0 +1,5 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["catalog", "details", "-t", "backend-worker", "-i", "groups"]) + assert response['tag'] == "backend-worker" diff --git a/tests/test_catalog_retrieve_entity_scorecard_scores.py b/tests/test_catalog_retrieve_entity_scorecard_scores.py new file mode 100644 index 0000000..d382457 --- /dev/null +++ b/tests/test_catalog_retrieve_entity_scorecard_scores.py @@ -0,0 +1,7 @@ +from tests.helpers.utils 
import * + +@pytest.mark.skip(reason="Cannot rely on scorecard to have been evaluated. Need FR to force evaluation?") +def test(): + response = cli(["catalog", "scorecard-scores", "-t", "backend-worker"]) + list = [scorecard for scorecard in response if scorecard['scorecardName'] == "Public API Test Production Readiness"] + assert list[0]['score'] == 1 diff --git a/tests/test_catalog_unarchive_entity.py b/tests/test_catalog_unarchive_entity.py new file mode 100644 index 0000000..2da2cd0 --- /dev/null +++ b/tests/test_catalog_unarchive_entity.py @@ -0,0 +1,11 @@ +from tests.helpers.utils import * + +def test(): + cli(["catalog", "create", "-f", "data/run-time/unarchive-entity.yaml"]) + cli(["catalog", "archive", "-t", "unarchive-entity"]) + + response = cli(["catalog", "details", "-t", "unarchive-entity"]) + assert response['isArchived'] == True, "isArchived attribute should be true" + + response = cli(["catalog", "unarchive", "-t", "unarchive-entity"]) + assert response['isArchived'] == False, "isArchived attribute should not be true" diff --git a/tests/test_integrations_sonarqube.py b/tests/test_integrations_sonarqube.py index 0918f0e..0f30e71 100644 --- a/tests/test_integrations_sonarqube.py +++ b/tests/test_integrations_sonarqube.py @@ -4,13 +4,34 @@ # we let the API handle validation -- we don't need valid input files. 
def _dummy_file(tmp_path): f = tmp_path / "test_integrations_sonarqube_add.json" - f.write_text("foobar") + + content = """ + { + "configurations": [ + { + "alias": "multiple-1", + "host": "cortex.io", + "isDefault": false, + "token": "string" + }, + { + "alias": "multiple-2", + "host": "cortex.io", + "isDefault": false, + "token": "string" + } + ] + } + """ + + f.write_text(content) + return f @responses.activate def test_integrations_sonarqube_add(): responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration", json={}, status=200) - cli(["integrations", "sonarqube", "add", "-a", "myAlias", "-h", "my.host.com", "--api-key", "123456", "-i"]) + cli(["integrations", "sonarqube", "add", "-a", "myAlias", "-h", "cortex.io", "--api-key", "123456", "-i"]) @responses.activate def test_integrations_sonarqube_add_multiple(tmp_path): From aa4eec7a22a03c47d08a107bc2273da7c0fff40c Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 20 Dec 2024 14:56:55 -0800 Subject: [PATCH 45/56] Add workflows --- cortexapps_cli/cli.py | 6 ++- cortexapps_cli/commands/workflows.py | 75 ++++++++++++++++++++++++++++ cortexapps_cli/cortex_client.py | 1 + data/run-time/test-workflows.json | 24 +++++++++ tests/test_workflows.py | 12 +++++ 5 files changed, 116 insertions(+), 2 deletions(-) create mode 100644 cortexapps_cli/commands/workflows.py create mode 100644 data/run-time/test-workflows.json create mode 100644 tests/test_workflows.py diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 545c66d..1500166 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -22,6 +22,7 @@ import cortexapps_cli.commands.entity_types as entity_types import cortexapps_cli.commands.gitops_logs as gitops_logs import cortexapps_cli.commands.groups as groups +import cortexapps_cli.commands.integrations as integrations import cortexapps_cli.commands.ip_allowlist as ip_allowlist import cortexapps_cli.commands.on_call as on_call import 
cortexapps_cli.commands.packages as packages @@ -30,8 +31,8 @@ import cortexapps_cli.commands.rest as rest import cortexapps_cli.commands.scim as scim import cortexapps_cli.commands.scorecards as scorecards -import cortexapps_cli.commands.integrations as integrations import cortexapps_cli.commands.teams as teams +import cortexapps_cli.commands.workflows as workflows app = typer.Typer( no_args_is_help=True, @@ -53,6 +54,7 @@ app.add_typer(entity_types.app, name="entity-types") app.add_typer(gitops_logs.app, name="gitops-logs") app.add_typer(groups.app, name="groups") +app.add_typer(integrations.app, name="integrations") app.add_typer(ip_allowlist.app, name="ip-allowlist") app.add_typer(on_call.app, name="on-call") app.add_typer(packages.app, name="packages") @@ -61,8 +63,8 @@ app.add_typer(rest.app, name="rest") app.add_typer(scim.app, name="scim") app.add_typer(scorecards.app, name="scorecards") -app.add_typer(integrations.app, name="integrations") app.add_typer(teams.app, name="teams") +app.add_typer(workflows.app, name="workflows") # global options @app.callback() diff --git a/cortexapps_cli/commands/workflows.py b/cortexapps_cli/commands/workflows.py new file mode 100644 index 0000000..8507954 --- /dev/null +++ b/cortexapps_cli/commands/workflows.py @@ -0,0 +1,75 @@ +import typer +from cortexapps_cli.utils import print_output_with_context +from typing_extensions import Annotated +import json + +app = typer.Typer(help="Workflows commands", no_args_is_help=True) + +@app.command() +def list( + ctx: typer.Context, + include_actions: bool = typer.Option(False, "--include-actions", "-i", help="When true, returns the list of actions for each workflow. 
Defaults to false"), + search_query: str = typer.Option(None, "--search-query", "-s", help="When set, only returns workflows with the given substring in the name or description"), + page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), +): + """ + Get users based on provided criteria. API key must have the View workflows permission + """ + + client = ctx.obj["client"] + + params = { + "includeActions": include_actions, + "searchQuery": search_query, + "page": page, + "pageSize": page_size + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + r = client.get("api/v1/workflows", params=params) + print_output_with_context(ctx, r) + +@app.command() +def get( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag or unique, auto-generated identifier for the workflow"), +): + """ + Retrieve workflow by tag or ID. API key must have the View workflows permission. + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/workflows/" + tag) + print_output_with_context(ctx, r) + +@app.command() +def delete( + ctx: typer.Context, + tag: str = typer.Option(..., "--tag", "-t", help="The tag or unique, auto-generated identifier for the workflow"), +): + """ + Delete workflow by tag or ID. API key must have the Edit workflows permission. + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/workflows/" + tag) + +@app.command() +def create( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help=" File containing workflow definition; can be passed as stdin with -, example: -f-")], +): + """ + Create or update new workflow. API key must have the Edit workflows permission. Note: If a workflow with the same tag already exists, it will be updated. 
+ """ + + client = ctx.obj["client"] + data = json.loads("".join([line for line in file_input])) + + r = client.post("api/v1/workflows", data=data) + print_output_with_context(ctx, r) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index 0442c4f..3cdf072 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -5,6 +5,7 @@ from rich import print_json from rich.markdown import Markdown from rich.console import Console +import urllib.parse from cortexapps_cli.utils import guess_data_key diff --git a/data/run-time/test-workflows.json b/data/run-time/test-workflows.json new file mode 100644 index 0000000..d95f866 --- /dev/null +++ b/data/run-time/test-workflows.json @@ -0,0 +1,24 @@ +{ + "name": "Hello World", + "tag": "hello-world", + "description": "Simple workflow", + "isDraft": true, + "filter": { + "type": "GLOBAL" + }, + "runResponseTemplate": null, + "actions": [ + { + "name": "Send Message", + "slug": "send-message", + "schema": { + "channel": "my-slack-channel", + "message": "Hello, world.", + "type": "SLACK" + }, + "outgoingActions": [], + "isRootAction": true + } + ], + "runRestrictionPolicies": [] +} diff --git a/tests/test_workflows.py b/tests/test_workflows.py new file mode 100644 index 0000000..ccd2c5a --- /dev/null +++ b/tests/test_workflows.py @@ -0,0 +1,12 @@ +from tests.helpers.utils import * +import yaml + +def test(): + cli(["workflows", "create", "-f", "data/run-time/test-workflows.json"]) + + response = cli(["workflows", "list"]) + assert any(workflow['tag'] == 'hello-world' for workflow in response['workflows']), "Should find workflow with tag hello-world" + + response = cli(["workflows", "get", "-t", "hello-world"]) + + response = cli(["workflows", "delete", "-t", "hello-world"]) From 02d946ea5adb1ea08c854eddf27dad41808a04cd Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 20 Dec 2024 15:22:44 -0800 Subject: [PATCH 46/56] Show help when no command provided --- 
cortexapps_cli/commands/api_keys.py | 2 +- cortexapps_cli/commands/audit_logs.py | 2 +- cortexapps_cli/commands/catalog.py | 2 +- cortexapps_cli/commands/custom_data.py | 2 +- cortexapps_cli/commands/custom_events.py | 2 +- cortexapps_cli/commands/dependencies.py | 2 +- cortexapps_cli/commands/deploys.py | 2 +- cortexapps_cli/commands/discovery_audit.py | 2 +- cortexapps_cli/commands/docs.py | 2 +- cortexapps_cli/commands/entity_types.py | 2 +- cortexapps_cli/commands/gitops_logs.py | 2 +- cortexapps_cli/commands/groups.py | 2 +- cortexapps_cli/commands/integrations.py | 3 +-- cortexapps_cli/commands/integrations_commands/aws.py | 3 +-- cortexapps_cli/commands/integrations_commands/azure_devops.py | 3 +-- .../commands/integrations_commands/azure_resources.py | 3 +-- cortexapps_cli/commands/integrations_commands/circleci.py | 3 +-- cortexapps_cli/commands/integrations_commands/coralogix.py | 3 +-- cortexapps_cli/commands/integrations_commands/datadog.py | 3 +-- cortexapps_cli/commands/integrations_commands/github.py | 3 +-- cortexapps_cli/commands/integrations_commands/gitlab.py | 3 +-- cortexapps_cli/commands/integrations_commands/incidentio.py | 3 +-- cortexapps_cli/commands/integrations_commands/launchdarkly.py | 3 +-- cortexapps_cli/commands/integrations_commands/newrelic.py | 3 +-- cortexapps_cli/commands/integrations_commands/pagerduty.py | 3 +-- cortexapps_cli/commands/integrations_commands/prometheus.py | 3 +-- cortexapps_cli/commands/integrations_commands/sonarqube.py | 3 +-- cortexapps_cli/commands/ip_allowlist.py | 2 +- cortexapps_cli/commands/on_call.py | 2 +- cortexapps_cli/commands/packages.py | 2 +- cortexapps_cli/commands/plugins.py | 2 +- cortexapps_cli/commands/queries.py | 2 +- cortexapps_cli/commands/rest.py | 2 +- cortexapps_cli/commands/scim.py | 2 +- cortexapps_cli/commands/scorecards.py | 3 +-- cortexapps_cli/commands/scorecards_commands/exemptions.py | 3 +-- 36 files changed, 36 insertions(+), 53 deletions(-) diff --git 
a/cortexapps_cli/commands/api_keys.py b/cortexapps_cli/commands/api_keys.py index 36e8b37..df2c1cc 100644 --- a/cortexapps_cli/commands/api_keys.py +++ b/cortexapps_cli/commands/api_keys.py @@ -5,7 +5,7 @@ from typing_extensions import Annotated from cortexapps_cli.utils import print_output_with_context -app = typer.Typer(help="API Keys commands") +app = typer.Typer(help="API Keys commands", no_args_is_help=True) class DefaultRole(str, Enum): ADMIN = "ADMIN" diff --git a/cortexapps_cli/commands/audit_logs.py b/cortexapps_cli/commands/audit_logs.py index 1813af0..f7fcb04 100644 --- a/cortexapps_cli/commands/audit_logs.py +++ b/cortexapps_cli/commands/audit_logs.py @@ -2,7 +2,7 @@ from enum import Enum import typer -app = typer.Typer(help="Audit log commands") +app = typer.Typer(help="Audit log commands", no_args_is_help=True) class Action(str, Enum): CREATE = "CREATE" diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index 730be77..3c8a1ed 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -5,7 +5,7 @@ from cortexapps_cli.command_options import ListCommandOptions from cortexapps_cli.utils import print_output_with_context -app = typer.Typer(help="Catalog commands") +app = typer.Typer(help="Catalog commands", no_args_is_help=True) class CatalogCommandOptions: include_archived = Annotated[ diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py index 9f6ebfd..42cb3fa 100644 --- a/cortexapps_cli/commands/custom_data.py +++ b/cortexapps_cli/commands/custom_data.py @@ -4,7 +4,7 @@ from rich import print_json -app = typer.Typer(help="Custom data commands") +app = typer.Typer(help="Custom data commands", no_args_is_help=True) # Need a helper function to parse custom_data. 
# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) diff --git a/cortexapps_cli/commands/custom_events.py b/cortexapps_cli/commands/custom_events.py index 2d37de3..dba2862 100644 --- a/cortexapps_cli/commands/custom_events.py +++ b/cortexapps_cli/commands/custom_events.py @@ -5,7 +5,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Custom events commands") +app = typer.Typer(help="Custom events commands", no_args_is_help=True) # Need a helper function to parse custom_data. # cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) diff --git a/cortexapps_cli/commands/dependencies.py b/cortexapps_cli/commands/dependencies.py index ae3625a..5b9ccfd 100644 --- a/cortexapps_cli/commands/dependencies.py +++ b/cortexapps_cli/commands/dependencies.py @@ -4,7 +4,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Dependency commands") +app = typer.Typer(help="Dependency commands", no_args_is_help=True) # Need a helper function to parse custom_data. # cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) diff --git a/cortexapps_cli/commands/deploys.py b/cortexapps_cli/commands/deploys.py index b97e83d..fc3388e 100644 --- a/cortexapps_cli/commands/deploys.py +++ b/cortexapps_cli/commands/deploys.py @@ -6,7 +6,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Deploys commands") +app = typer.Typer(help="Deploys commands", no_args_is_help=True) # Need a helper function to parse custom_data. 
# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) diff --git a/cortexapps_cli/commands/discovery_audit.py b/cortexapps_cli/commands/discovery_audit.py index 6ea7df4..5a4df23 100644 --- a/cortexapps_cli/commands/discovery_audit.py +++ b/cortexapps_cli/commands/discovery_audit.py @@ -42,7 +42,7 @@ class DiscoverySource(str, Enum): SERVICENOW = "SERVICENOW" SERVICENOW_DOMAIN = "SERVICENOW_DOMAIN" -app = typer.Typer(help="Discovery Audit commands") +app = typer.Typer(help="Discovery Audit commands", no_args_is_help=True) @app.command() def get( diff --git a/cortexapps_cli/commands/docs.py b/cortexapps_cli/commands/docs.py index a2f32c7..8ffc27e 100644 --- a/cortexapps_cli/commands/docs.py +++ b/cortexapps_cli/commands/docs.py @@ -4,7 +4,7 @@ from typing_extensions import Annotated import yaml -app = typer.Typer(help="Docs commands") +app = typer.Typer(help="Docs commands", no_args_is_help=True) @app.command() def get( diff --git a/cortexapps_cli/commands/entity_types.py b/cortexapps_cli/commands/entity_types.py index 432512c..89d5cd6 100644 --- a/cortexapps_cli/commands/entity_types.py +++ b/cortexapps_cli/commands/entity_types.py @@ -6,7 +6,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Entity Types commands") +app = typer.Typer(help="Entity Types commands", no_args_is_help=True) @app.command() def list( diff --git a/cortexapps_cli/commands/gitops_logs.py b/cortexapps_cli/commands/gitops_logs.py index e6aa43b..b255ec4 100644 --- a/cortexapps_cli/commands/gitops_logs.py +++ b/cortexapps_cli/commands/gitops_logs.py @@ -3,7 +3,7 @@ #from rich import print_json import typer -app = typer.Typer(help="GitOps Logs commands") +app = typer.Typer(help="GitOps Logs commands", no_args_is_help=True) class Operation(str, Enum): ARCHIVED = "ARCHIVED" diff --git a/cortexapps_cli/commands/groups.py b/cortexapps_cli/commands/groups.py index 77fc12b..45c6f7b 100644 --- a/cortexapps_cli/commands/groups.py +++ 
b/cortexapps_cli/commands/groups.py @@ -2,7 +2,7 @@ from rich import print_json import typer -app = typer.Typer(help="Groups commands") +app = typer.Typer(help="Groups commands", no_args_is_help=True) @app.command() def get( diff --git a/cortexapps_cli/commands/integrations.py b/cortexapps_cli/commands/integrations.py index c12307e..5e2f7c7 100644 --- a/cortexapps_cli/commands/integrations.py +++ b/cortexapps_cli/commands/integrations.py @@ -18,8 +18,7 @@ import cortexapps_cli.commands.integrations_commands.prometheus as prometheus import cortexapps_cli.commands.integrations_commands.sonarqube as sonarqube -app = typer.Typer(help="Integrations commands", - no_args_is_help=True) +app = typer.Typer(help="Integrations commands", no_args_is_help=True) app.add_typer(aws.app, name="aws") app.add_typer(azure_resources.app, name="azure-resources") app.add_typer(azure_devops.app, name="azure-devops") diff --git a/cortexapps_cli/commands/integrations_commands/aws.py b/cortexapps_cli/commands/integrations_commands/aws.py index 92bcd04..a3aba2e 100644 --- a/cortexapps_cli/commands/integrations_commands/aws.py +++ b/cortexapps_cli/commands/integrations_commands/aws.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="AWS commands", - no_args_is_help=True) +app = typer.Typer(help="AWS commands", no_args_is_help=True) # Make this a common client function? 
diff --git a/cortexapps_cli/commands/integrations_commands/azure_devops.py b/cortexapps_cli/commands/integrations_commands/azure_devops.py index 0956a9d..5134acd 100644 --- a/cortexapps_cli/commands/integrations_commands/azure_devops.py +++ b/cortexapps_cli/commands/integrations_commands/azure_devops.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Azure Devops commands", - no_args_is_help=True) +app = typer.Typer(help="Azure Devops commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/integrations_commands/azure_resources.py b/cortexapps_cli/commands/integrations_commands/azure_resources.py index d606692..c023bb3 100644 --- a/cortexapps_cli/commands/integrations_commands/azure_resources.py +++ b/cortexapps_cli/commands/integrations_commands/azure_resources.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Azure Resources commands", - no_args_is_help=True) +app = typer.Typer(help="Azure Resources commands", no_args_is_help=True) # Make this a common client function? 
diff --git a/cortexapps_cli/commands/integrations_commands/circleci.py b/cortexapps_cli/commands/integrations_commands/circleci.py index cc71503..3a85095 100644 --- a/cortexapps_cli/commands/integrations_commands/circleci.py +++ b/cortexapps_cli/commands/integrations_commands/circleci.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="CircleCI commands", - no_args_is_help=True) +app = typer.Typer(help="CircleCI commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/integrations_commands/coralogix.py b/cortexapps_cli/commands/integrations_commands/coralogix.py index a066c06..124cd11 100644 --- a/cortexapps_cli/commands/integrations_commands/coralogix.py +++ b/cortexapps_cli/commands/integrations_commands/coralogix.py @@ -4,8 +4,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Coralogix commands", - no_args_is_help=True) +app = typer.Typer(help="Coralogix commands", no_args_is_help=True) class Region(str, Enum): US1 = "US1" diff --git a/cortexapps_cli/commands/integrations_commands/datadog.py b/cortexapps_cli/commands/integrations_commands/datadog.py index 600dd9f..105f595 100644 --- a/cortexapps_cli/commands/integrations_commands/datadog.py +++ b/cortexapps_cli/commands/integrations_commands/datadog.py @@ -4,8 +4,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Datadog commands", - no_args_is_help=True) +app = typer.Typer(help="Datadog commands", no_args_is_help=True) class Region(str, Enum): US1 = "US1" diff --git a/cortexapps_cli/commands/integrations_commands/github.py b/cortexapps_cli/commands/integrations_commands/github.py index f8225e8..ab7ab83 100644 --- a/cortexapps_cli/commands/integrations_commands/github.py +++ b/cortexapps_cli/commands/integrations_commands/github.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="GitHub commands", - no_args_is_help=True) 
+app = typer.Typer(help="GitHub commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/integrations_commands/gitlab.py b/cortexapps_cli/commands/integrations_commands/gitlab.py index 1863d4a..2e3da64 100644 --- a/cortexapps_cli/commands/integrations_commands/gitlab.py +++ b/cortexapps_cli/commands/integrations_commands/gitlab.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="GitLab commands", - no_args_is_help=True) +app = typer.Typer(help="GitLab commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/integrations_commands/incidentio.py b/cortexapps_cli/commands/integrations_commands/incidentio.py index 04ad096..ad36b1e 100644 --- a/cortexapps_cli/commands/integrations_commands/incidentio.py +++ b/cortexapps_cli/commands/integrations_commands/incidentio.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="IncidentIO commands", - no_args_is_help=True) +app = typer.Typer(help="IncidentIO commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/integrations_commands/launchdarkly.py b/cortexapps_cli/commands/integrations_commands/launchdarkly.py index 0c26c02..2711590 100644 --- a/cortexapps_cli/commands/integrations_commands/launchdarkly.py +++ b/cortexapps_cli/commands/integrations_commands/launchdarkly.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="LaunchDarkly commands", - no_args_is_help=True) +app = typer.Typer(help="LaunchDarkly commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/integrations_commands/newrelic.py b/cortexapps_cli/commands/integrations_commands/newrelic.py index d588d08..9a59ed2 100644 --- a/cortexapps_cli/commands/integrations_commands/newrelic.py +++ b/cortexapps_cli/commands/integrations_commands/newrelic.py @@ -3,8 +3,7 @@ import typer from 
typing_extensions import Annotated -app = typer.Typer(help="New Relic commands", - no_args_is_help=True) +app = typer.Typer(help="New Relic commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/integrations_commands/pagerduty.py b/cortexapps_cli/commands/integrations_commands/pagerduty.py index 71b30c9..64993b3 100644 --- a/cortexapps_cli/commands/integrations_commands/pagerduty.py +++ b/cortexapps_cli/commands/integrations_commands/pagerduty.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="PagerDuty commands", - no_args_is_help=True) +app = typer.Typer(help="PagerDuty commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/integrations_commands/prometheus.py b/cortexapps_cli/commands/integrations_commands/prometheus.py index 8ce6b6b..feb4685 100644 --- a/cortexapps_cli/commands/integrations_commands/prometheus.py +++ b/cortexapps_cli/commands/integrations_commands/prometheus.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Prometheus commands", - no_args_is_help=True) +app = typer.Typer(help="Prometheus commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/integrations_commands/sonarqube.py b/cortexapps_cli/commands/integrations_commands/sonarqube.py index 18a55d9..b9260d4 100644 --- a/cortexapps_cli/commands/integrations_commands/sonarqube.py +++ b/cortexapps_cli/commands/integrations_commands/sonarqube.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="SonarQube commands", - no_args_is_help=True) +app = typer.Typer(help="SonarQube commands", no_args_is_help=True) @app.command() def add( diff --git a/cortexapps_cli/commands/ip_allowlist.py b/cortexapps_cli/commands/ip_allowlist.py index 30c3102..d692725 100644 --- a/cortexapps_cli/commands/ip_allowlist.py +++ b/cortexapps_cli/commands/ip_allowlist.py @@ -1,6 
+1,6 @@ import typer -app = typer.Typer(help="IP Allowlist commands") +app = typer.Typer(help="IP Allowlist commands", no_args_is_help=True) @app.command() def get( diff --git a/cortexapps_cli/commands/on_call.py b/cortexapps_cli/commands/on_call.py index 3070f05..f5504a6 100644 --- a/cortexapps_cli/commands/on_call.py +++ b/cortexapps_cli/commands/on_call.py @@ -2,7 +2,7 @@ import json from rich import print_json -app = typer.Typer(help="On Call commands") +app = typer.Typer(help="On Call commands", no_args_is_help=True) @app.command() def get( diff --git a/cortexapps_cli/commands/packages.py b/cortexapps_cli/commands/packages.py index 4d00164..dfac1bc 100644 --- a/cortexapps_cli/commands/packages.py +++ b/cortexapps_cli/commands/packages.py @@ -7,7 +7,7 @@ import cortexapps_cli.commands.packages_commands.node as node import cortexapps_cli.commands.packages_commands.nuget as nuget -app = typer.Typer(help="Packages commands") +app = typer.Typer(help="Packages commands", no_args_is_help=True) app.add_typer(go.app, name="go") app.add_typer(java.app, name="java") app.add_typer(python.app, name="python") diff --git a/cortexapps_cli/commands/plugins.py b/cortexapps_cli/commands/plugins.py index f7cb0c9..4aff647 100644 --- a/cortexapps_cli/commands/plugins.py +++ b/cortexapps_cli/commands/plugins.py @@ -3,7 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Plugins commands") +app = typer.Typer(help="Plugins commands", no_args_is_help=True) @app.command() def list( diff --git a/cortexapps_cli/commands/queries.py b/cortexapps_cli/commands/queries.py index 72d61f8..416f7b8 100644 --- a/cortexapps_cli/commands/queries.py +++ b/cortexapps_cli/commands/queries.py @@ -3,7 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Queries commands") +app = typer.Typer(help="Queries commands", no_args_is_help=True) @app.command() def run( diff --git a/cortexapps_cli/commands/rest.py b/cortexapps_cli/commands/rest.py 
index 54cfb78..11a4156 100644 --- a/cortexapps_cli/commands/rest.py +++ b/cortexapps_cli/commands/rest.py @@ -7,7 +7,7 @@ from typing_extensions import Annotated from rich import print_json -app = typer.Typer(help="REST API commands") +app = typer.Typer(help="REST API commands", no_args_is_help=True) def parse_multi_value_option(option: List[str] | None) -> dict: if option is None: diff --git a/cortexapps_cli/commands/scim.py b/cortexapps_cli/commands/scim.py index 7486c44..b569115 100644 --- a/cortexapps_cli/commands/scim.py +++ b/cortexapps_cli/commands/scim.py @@ -4,7 +4,7 @@ import typer import urllib.parse -app = typer.Typer(help="SCIM commands") +app = typer.Typer(help="SCIM commands", no_args_is_help=True) # As of November 2024, sortBy and sortOrder are not supported in our code and result in a 501 error # Not sure how domain is supposed to be used so leaving it out too diff --git a/cortexapps_cli/commands/scorecards.py b/cortexapps_cli/commands/scorecards.py index 450a973..6f02c2c 100644 --- a/cortexapps_cli/commands/scorecards.py +++ b/cortexapps_cli/commands/scorecards.py @@ -5,8 +5,7 @@ import cortexapps_cli.commands.scorecards_commands.exemptions as exemptions -app = typer.Typer(help="Scorecards commands", - no_args_is_help=True) +app = typer.Typer(help="Scorecards commands", no_args_is_help=True) app.add_typer(exemptions.app, name="exemptions") @app.command() diff --git a/cortexapps_cli/commands/scorecards_commands/exemptions.py b/cortexapps_cli/commands/scorecards_commands/exemptions.py index 78f27e2..287e18f 100644 --- a/cortexapps_cli/commands/scorecards_commands/exemptions.py +++ b/cortexapps_cli/commands/scorecards_commands/exemptions.py @@ -3,8 +3,7 @@ import typer from typing_extensions import Annotated -app = typer.Typer(help="Exemptions commands", - no_args_is_help=True) +app = typer.Typer(help="Exemptions commands", no_args_is_help=True) @app.command() def request( From 3ecae2eba5e70b2ef05ea7e68563b6cdc8b07707 Mon Sep 17 00:00:00 2001 From: 
Jeff Schnitter Date: Fri, 30 May 2025 14:10:22 -0700 Subject: [PATCH 47/56] Big sweep to get all tests running --- HISTORY.md | 21 + Justfile | 16 +- cortexapps_cli/cli.py | 10 +- cortexapps_cli/command_options.py | 6 + cortexapps_cli/commands/api_keys.py | 112 ++- cortexapps_cli/commands/audit_logs.py | 37 +- cortexapps_cli/commands/backup.py | 274 +++++++ .../commands/backup_commands/cortex_export.py | 174 +++++ cortexapps_cli/commands/catalog.py | 32 +- cortexapps_cli/commands/custom_data.py | 34 +- cortexapps_cli/commands/custom_events.py | 44 +- cortexapps_cli/commands/custom_metrics.py | 3 +- cortexapps_cli/commands/dependencies.py | 5 +- cortexapps_cli/commands/deploys.py | 11 +- cortexapps_cli/commands/entity_types.py | 52 +- cortexapps_cli/commands/gitops_logs.py | 36 +- cortexapps_cli/commands/groups.py | 3 +- cortexapps_cli/commands/ip_allowlist.py | 18 +- cortexapps_cli/commands/packages.py | 37 +- cortexapps_cli/commands/plugins.py | 65 +- cortexapps_cli/commands/scorecards.py | 59 +- cortexapps_cli/commands/workflows.py | 63 +- cortexapps_cli/cortex_client.py | 25 +- poetry.lock | 677 +++++++++++------- pyproject.toml | 2 + tests/plugins.json | 298 ++++++++ tests/test_api_keys.py | 13 + tests/test_audit_logs.py | 2 +- tests/test_audit_logs_dates.py | 2 +- tests/test_audit_logs_end_date.py | 2 +- tests/test_audit_logs_page.py | 2 +- tests/test_audit_logs_size.py | 2 +- tests/test_audit_logs_start_date.py | 2 +- tests/test_catalog_create_entity.py | 1 + ...talog_list_entity_descriptors_page_size.py | 2 +- ...st_catalog_list_entity_descriptors_yaml.py | 3 +- tests/test_config_file.py | 97 +++ tests/test_custom_metrics.py | 6 +- tests/test_deploys.py | 6 +- tests/test_scim.py | 2 +- tests/test_scorecards.py | 4 + tests/test_stdin.py | 11 + 42 files changed, 1919 insertions(+), 352 deletions(-) create mode 100644 cortexapps_cli/commands/backup.py create mode 100644 cortexapps_cli/commands/backup_commands/cortex_export.py create mode 100644 
tests/plugins.json create mode 100644 tests/test_api_keys.py create mode 100644 tests/test_config_file.py create mode 100644 tests/test_stdin.py diff --git a/HISTORY.md b/HISTORY.md index 4554d56..f915dc3 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,6 +1,27 @@ Release History =============== +1.0.0 (2025-0x-yy) +------------------ + +**Improvements** +- Added sub-commands: + +**Breaking Changes** +- custom-events -i changed to -ts +- groups -> no support for adding groups with JSON file, add groups via comma-separated list on command line +- plugins get changed to-> list, + +**TODO** +- Do a full reconciliation of all flags +- DONE: Add -debug flag +- Test input files, env vars +- Add support for adding groups via JSON file? +- Check all get/list sub-commands +- sort by date: TypeError: '<' not supported between instances of 'NoneType' and 'NoneType' + + + 0.26.6 (2024-07-30) ------------------ diff --git a/Justfile b/Justfile index 411332a..3a50f16 100644 --- a/Justfile +++ b/Justfile @@ -19,8 +19,16 @@ _check-vars: fi # Run all tests -test-all: _check-vars load-data - poetry run pytest -rA -n auto --cov=cortexapps_cli --cov-append --cov-report term-missing tests +test-all: _check-vars load-data test-parallel test-serial + +# Run tests that can run in parallel +test-parallel: _check-vars load-data + PYTHONPATH=. poetry run pytest -rA -n auto -m "not serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests + +# Run tests that have to run sequentially +test-serial: _check-vars load-data + #@if [ -f .coverage ]; then rm .coverage; fi + PYTHONPATH=. 
poetry run pytest -rA -n auto -m "serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests # Run a single test, ie: just test tests/test_catalog.py test testname: _check-vars @@ -30,6 +38,10 @@ test testname: _check-vars test-suite command: _check-vars poetry run pytest -k test_{{command}} +# Run all tests in a directory +test-dir dir: _check-vars + poetry run pytest {{dir}} + # Load data from 'data' directory into Cortex load-data: #!/bin/bash diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 1500166..9845a44 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -6,11 +6,13 @@ import importlib.metadata import tomllib import configparser +import logging from cortexapps_cli.cortex_client import CortexClient import cortexapps_cli.commands.api_keys as api_keys import cortexapps_cli.commands.audit_logs as audit_logs +import cortexapps_cli.commands.backup as backup import cortexapps_cli.commands.catalog as catalog import cortexapps_cli.commands.custom_data as custom_data import cortexapps_cli.commands.custom_events as custom_events @@ -43,6 +45,7 @@ # add subcommands app.add_typer(api_keys.app, name="api-keys") app.add_typer(audit_logs.app, name="audit-logs") +app.add_typer(backup.app, name="backup") app.add_typer(catalog.app, name="catalog") app.add_typer(custom_data.app, name="custom-data") app.add_typer(custom_events.app, name="custom-events") @@ -74,10 +77,15 @@ def global_callback( url: str = typer.Option("https://api.getcortexapp.com", "--url", "-u", help="Base URL for the API", envvar="CORTEX_BASE_URL"), config_file: str = typer.Option(os.path.join(os.path.expanduser('~'), '.cortex', 'config'), "--config", "-c", help="Config file path", envvar="CORTEX_CONFIG"), tenant: str = typer.Option("default", "--tenant", "-t", help="Tenant alias", envvar="CORTEX_TENANT_ALIAS"), + log_level: Annotated[str, typer.Option("--log-level", "-l", help="Set the logging level")] = "INFO" ): if not 
ctx.obj: ctx.obj = {} + numeric_level = getattr(logging, log_level.upper(), None) + if not isinstance(numeric_level, int): + raise ValueError(f"Invalid log level: {log_level}") + if not os.path.isfile(config_file): # no config file found if not api_key: @@ -109,7 +117,7 @@ def global_callback( api_key = api_key.strip('"\' ') url = url.strip('"\' /') - ctx.obj["client"] = CortexClient(api_key, url) + ctx.obj["client"] = CortexClient(api_key, tenant, numeric_level, url) @app.command() def version(): diff --git a/cortexapps_cli/command_options.py b/cortexapps_cli/command_options.py index 5186e73..0d845e8 100644 --- a/cortexapps_cli/command_options.py +++ b/cortexapps_cli/command_options.py @@ -31,3 +31,9 @@ class ListCommandOptions: Optional[int], typer.Option("--page-size", "-z", help="Page size for results", show_default=False) ] + +class CommandOptions: + _print = Annotated[ + Optional[bool], + typer.Option("--print", help="If result should be printed to the terminal", hidden=True) + ] diff --git a/cortexapps_cli/commands/api_keys.py b/cortexapps_cli/commands/api_keys.py index df2c1cc..874c076 100644 --- a/cortexapps_cli/commands/api_keys.py +++ b/cortexapps_cli/commands/api_keys.py @@ -4,14 +4,61 @@ from enum import Enum from typing_extensions import Annotated from cortexapps_cli.utils import print_output_with_context +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions -app = typer.Typer(help="API Keys commands", no_args_is_help=True) +app = typer.Typer( + help="API Keys commands", + no_args_is_help=True +) class DefaultRole(str, Enum): ADMIN = "ADMIN" USER = "USER" READ_ONLY = "READ_ONLY" +@app.command() +def list( + ctx: typer.Context, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: 
ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], +): + """ + List API keys. The API key used to make the request must have the Edit API keys permission. + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size + } + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "CID=cid", + "Name=name", + "Last4=last4", + "Description=description", + "Roles=roles", + "CreatedDate=createdDate", + "ExpirationDate=expirationDate", + ] + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + r = client.fetch("api/v1/auth/key", params=params) + else: + r = client.get("api/v1/auth/key", params=params) + print_output_with_context(ctx, r) + @app.command() def create( ctx: typer.Context, @@ -31,17 +78,21 @@ def create( if name or description or expiration_date or default_roles or custom_roles: raise typer.BadParameter("When providing an API definition file, do not specify any other attributes") data = json.loads("".join([line for line in file_input])) - else: + if not default_roles and not custom_roles: + raise typer.BadParameter("One of default-roles or custom-roles is required") + data = { "roles": [], "name": name } - for role in default_roles.split(","): - data["roles"].append({"role": role, "type": "DEFAULT"}) - for role in custom_roles.split(","): - data["roles"].append({"tag": role, "type": "CUSTOM"}) + if default_roles is not None: + for role in default_roles.split(","): + data["roles"].append({"role": role, "type": "DEFAULT"}) + if custom_roles is not None: + for role in custom_roles.split(","): + data["roles"].append({"tag": role, "type": "CUSTOM"}) if description: data["description"] = description @@ -49,5 +100,52 @@ def create( data["expirationDate"] = expiration_date.strftime('%Y-%m-%dT%H:%M:%S.000Z') r = client.post("api/v1/auth/key", data=data) - #print(r) print_output_with_context(ctx, r) + 
+@app.command() +def update( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="The unique, auto-generated identifier for the API key"), + description: str | None = typer.Option(None, "--description", "-d", help="Description of the API key"), + name: str = typer.Option(..., "--name", "-n", help="Name of the API key"), +): + """ + Update API key. The API key used to make the request must have the Edit API keys permission. + """ + client = ctx.obj["client"] + + data = { + "name": name + } + if description is not None: + data["description"] = description + + r = client.put("api/v1/auth/key/" + cid, data=data) + print_output_with_context(ctx, r) + +@app.command() +def get( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="The unique, auto-generated identifier for the API key"), +): + """ + Get API key. + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/auth/key/"+ cid) + print_output_with_context(ctx, r) + +@app.command() +def delete( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="The unique, auto-generated identifier for the API key"), +): + """ + Delete API key. 
+ """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/auth/key/"+ cid) diff --git a/cortexapps_cli/commands/audit_logs.py b/cortexapps_cli/commands/audit_logs.py index f7fcb04..c6d23f8 100644 --- a/cortexapps_cli/commands/audit_logs.py +++ b/cortexapps_cli/commands/audit_logs.py @@ -1,8 +1,13 @@ from datetime import datetime from enum import Enum import typer +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output -app = typer.Typer(help="Audit log commands", no_args_is_help=True) +app = typer.Typer( + help="Audit log commands", + no_args_is_help=True +) class Action(str, Enum): CREATE = "CREATE" @@ -31,12 +36,17 @@ def get( actorIpAddresses: list[str] | None = typer.Option(None, "--actorIpAddresses", "-ai", help="Source IP Addresses associated with audit event"), actorRequestTypes: list[ActorRequestType] | None = typer.Option(None, "--actorRequestTypes", "-ar", help="Request event associated with audit event"), actorTypes: list[ActorType] | None = typer.Option(None, "--actorTypes", "-at", help="Actor that triggered the audit event"), - end_time: datetime = typer.Option(None, "--endTime", "-e", help="End time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + end_time: datetime = typer.Option(None, "--end-time", "-e", help="End time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), objectIdentifiers: list[str] | None = typer.Option(None, "--objectIdentifiers", "-oi", help="The name of the Cortex object that was modified, ie x-cortex-tag value, metadata field name, etc."), objectTypes: list[str] | None = typer.Option(None, "--objectTypes", "-ot", help="ObjectTypes"), - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), - start_time: datetime = 
typer.Option(None, "--startTime", "-s", help="Start time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + start_time: datetime = typer.Option(None, "--start-time", "-s", help="Start time of audit logs to retrieve", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], ): """ Note: To see the complete list of possible values, please reference the available filter options for audit logs under Settings in the app. @@ -68,4 +78,19 @@ def get( if str(type(v)) == "": params[k] = ','.join(v) - client.fetch_or_get("api/v1/audit-logs", page, params=params) + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Action=action", + "ObjectType=objectType", + "ActorIdentifier=actorIdentifier", + "ObjectIdentifier=objectIdentifier", + "IpAddress=ipAddress", + "Timestamp=timestamp", + ] + + if page is None: + r = client.fetch("api/v1/audit-logs", params=params) + else: + r = client.get("api/v1/audit-logs", params=params) + + print_output_with_context(ctx, r) diff --git a/cortexapps_cli/commands/backup.py b/cortexapps_cli/commands/backup.py new file mode 100644 index 0000000..dcc5df3 --- /dev/null +++ b/cortexapps_cli/commands/backup.py @@ -0,0 +1,274 @@ +from datetime import datetime +from typing import Optional +from typing import List +from typing_extensions import Annotated +import typer +import json +import os +from rich import print, print_json +from rich.console import Console +from enum import Enum +import yaml + +import cortexapps_cli.commands.scorecards as scorecards +import cortexapps_cli.commands.catalog as catalog +import cortexapps_cli.commands.entity_types as entity_types 
+import cortexapps_cli.commands.ip_allowlist as ip_allowlist +import cortexapps_cli.commands.plugins as plugins +import cortexapps_cli.commands.workflows as workflows + + +app = typer.Typer(help="Backup commands") + +# Need to support the following: +# DONE -> Catalog +# Custom Data from API +# Custom Events +# Custom Metrics +# Dependencies from API +# Deploys from API +# Docs +# Eng Intel - User Labels +# DONE -> Entity Types +# Groups from API -> would have to loop over all entities +# DONE -> IP Allowlist +# Packages from API -> would have to loop over all entities +# DONE -> Plugins +# DONE -> Scorecards +# Secrets +# DONE -> Workflows + +def _create_directory(directory): + if not os.path.isdir(directory): + os.mkdir(directory, 0o700) + +def _directory_name(directory, backup_type): + directory = directory + "/" + backup_type + _create_directory(directory) + print("Getting " + backup_type) + return directory + +def _file_name(directory, tag, content, extension): + print("--> " + tag) + file = directory + "/" + tag + "." + extension + if extension == "json": + is_json = True + else: + is_json = False + _write_file(content, file, is_json) + +def _write_file(content, file, is_json=False): + with open(file, 'w') as f: + if is_json: + # plugins return a dict? 
+ + #json_data = json.loads(str(content).replace("'", '"')) # Fixing single quotes to double quotes for JSON format + + #json_data = json.loads(content) + #json.dump(json_data, f, indent=4) + + #print_json(json_data, file=f) + #print(json.dumps(json_data, indent=4), file=f) + + #print_json(data=content, file=f) + print(content, file=f) + + #json.dump(content, f, indent=4) + #console.print_json(content) + #console = Console(record=True) + #console.print_json(data=content) + #f.write(console.export_text()) + #f.write(data) + else: + f.write(str(content) + "\n") + f.close() + +def _catalog(ctx, directory, catalog_types): + directory = _directory_name(directory, "catalog") + + data = catalog.list_descriptors(ctx, types=catalog_types, page_size=1000, yaml="true", _print=False) + + for descriptor in data['descriptors']: + try: + y = yaml.safe_load(str(descriptor)) + tag = y['info']['x-cortex-tag'] + y = yaml.dump(y, default_flow_style=False) + except: + print("error") + print(str(descriptor)) + continue + finally: + # Slash will be interpreted as a sub-directory + tag = tag.replace("/", "-") + _file_name(directory, tag, y, "yaml") + +def _entity_types(ctx, directory): + directory = _directory_name(directory, "entity-types") + + data = entity_types.list(ctx, include_built_in=False, page=0, page_size=250, _print=False) + definitions_sorted = sorted(data['definitions'], key=lambda x: x["type"]) + + for definition in definitions_sorted: + tag = definition['type'] + json_string = json.dumps(definition, indent=4) + _file_name(directory, tag, json_string, "json") + +def _ip_allowlist(ctx, directory): + directory = _directory_name(directory, "ip-allowlist") + #file = directory + "/ip-allowlist.json" + + content = ip_allowlist.get(ctx, page=None, page_size=None, _print=False) + _file_name(directory, "ip-allowlist", str(content), "json") + +def _plugins(ctx, directory): + directory = _directory_name(directory, "plugins") + + list = plugins.list(ctx, _print=False, 
include_drafts="true", page=None, page_size=None) + tags = [plugin["tag"] for plugin in list["plugins"]] + tags_sorted = sorted(tags) + for tag in tags_sorted: + content = plugins.get(ctx, tag_or_id=tag, include_blob="true", _print=False) + _file_name(directory, tag, content, "json") + +def _scorecards(ctx, directory): + directory = _directory_name(directory, "scorecards") + + list = scorecards.list(ctx, show_drafts=True, page=None, page_size=None, _print=False) + tags = [scorecard["tag"] for scorecard in list["scorecards"]] + tags_sorted = sorted(tags) + for tag in tags_sorted: + content = scorecards.descriptor(ctx, scorecard_tag=tag, _print=False) + _file_name(directory, tag, content, "yaml") + +def _workflows(ctx, directory): + directory = _directory_name(directory, "workflows") + + list = workflows.list(ctx, _print=False, include_actions="false", page=None, page_size=None, search_query=None) + tags = [workflow["tag"] for workflow in list["workflows"]] + tags_sorted = sorted(tags) + for tag in tags_sorted: + try: + content = workflows.get(ctx, tag=tag, yaml="true", _print=False) + _file_name(directory, tag, content, "yaml") + except: + print("failed for " + tag) + +backupTypes = { + "catalog", + "entity-types", + "ip-allowlist", + "plugins", + "scorecards", + "workflows" +} +backupString = ','.join(backupTypes) + +def _parse_export_types(value: str) -> List[str]: + if value == "all": + return backupTypes + types = [] + for val in value: + for item in val.split(","): + if item not in backupTypes: + raise typer.BadParameter(item + " is not a valid type. 
Valid types are: " + backupString + ".")
+            else:
+                types.append(item)
+    return types
+
+def _parse_catalog_types(ctx, catalog_types):
+    data = entity_types.list(ctx, include_built_in=True, page=0, page_size=250, _print=False)
+
+    built_in = ['service', 'team', 'domain']
+    tags = [entity_type["type"] for entity_type in data["definitions"]]
+    tags_sorted = sorted(tags + built_in)
+    all_types_string = ','.join(tags_sorted)
+    if catalog_types == "all":
+        return all_types_string
+
+    for item in catalog_types.split(","):
+        if item not in tags_sorted:
+            raise typer.BadParameter(item + " is not a valid type. Valid types are: " + all_types_string + ".")
+    return catalog_types
+
+@app.command()
+def export(
+    ctx: typer.Context,
+    export_types: List[str] = typer.Option(_parse_export_types("all"), "--export-types", "-e", help="Comma separated list of types to export, defaults to all", callback=_parse_export_types),
+    catalog_types: str = typer.Option("all", "--catalog-types", "-c", help="Comma separated list of catalog types to export, defaults to all"),
+    directory: str = typer.Option(os.path.expanduser('~') + '/.cortex/export/' + datetime.now().strftime("%Y-%m-%d-%H-%M-%S"), "--directory", "-d", help="Location of export directory, defaults to ~/.cortex/export/<timestamp>-<tenant>"),
+):
+    """
+    Export tenant
+
+
+    """
+    export_types = sorted(list(set(export_types)))
+
+    client = ctx.obj["client"]
+    catalog_types = _parse_catalog_types(ctx, catalog_types)
+    directory = directory + "-" + client.tenant
+    _create_directory(directory)
+    if "catalog" in export_types:
+        _catalog(ctx, directory, catalog_types)
+    if "entity-types" in export_types:
+        _entity_types(ctx, directory)
+    if "ip-allowlist" in export_types:
+        _ip_allowlist(ctx, directory)
+    if "plugins" in export_types:
+        _plugins(ctx, directory)
+    if "scorecards" in export_types:
+        _scorecards(ctx, directory)
+    if "workflows" in export_types:
+        _workflows(ctx, directory)
+
+    print("\nExport complete!")
+    print("Contents available in " + directory)
+
+def
_import_ip_allowlist(directory): + if os.path.isdir(directory): + print("Processing: " + directory) + for filename in os.listdir(directory): + file_path = os.path.join(directory, filename) + if os.path.isfile(file_path): + print(" Importing: " + filename) + ip_allowlist.get(ctx, file=file_path, force=False, _print=False) + +def _import_entity_types(directory): + if os.path.isdir(directory): + print("FOUND: " + directory) + +def _import_catalog(directory): + if os.path.isdir(directory): + print("FOUND: " + directory) + +def _import_plugins(directory): + if os.path.isdir(directory): + print("FOUND: " + directory) + +def _import_scorecards(directory): + if os.path.isdir(directory): + print("FOUND: " + directory) + +def _import_workflows(directory): + if os.path.isdir(directory): + print("FOUND: " + directory) + +@app.command("import") +def import_tenant( + ctx: typer.Context, + directory: str = typer.Option(..., "--directory", "-d", help="Location of import directory."), +): + """ + Import data into tenant + + """ + client = ctx.obj["client"] + + print("import directory = " + directory) + _import_ip_allowlist(directory + "/ip-allowlist") + _import_entity_types(directory + "/entity-types") + _import_catalog(directory + "/catalog") + _import_plugins(directory + "/plugins") + _import_scorecards(directory + "/scorecards") + _import_workflows(directory + "/workflows") + diff --git a/cortexapps_cli/commands/backup_commands/cortex_export.py b/cortexapps_cli/commands/backup_commands/cortex_export.py new file mode 100644 index 0000000..5134acd --- /dev/null +++ b/cortexapps_cli/commands/backup_commands/cortex_export.py @@ -0,0 +1,174 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated + +app = typer.Typer(help="Azure Devops commands", no_args_is_help=True) + +@app.command() +def add( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="Alias for this configuration"), + host: str = typer.Option(None, 
"--host", "-h", help="Optional host name"),
+    is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"),
+    organization_slug: str = typer.Option(..., "--organization-slug", "-o", help="Identifier for organization"),
+    personal_access_token: str = typer.Option(..., "--pat", "-p", help="Personal Access Token"),
+    username: str = typer.Option(..., "--username", "-u", help="Username"),
+    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations, if command line options not used; can be passed as stdin with -, example: -f-")] = None,
+):
+    """
+    Add a single configuration
+    """
+
+    client = ctx.obj["client"]
+
+    if file_input:
+        if alias or is_default or host or organization_slug or personal_access_token or username:
+            raise typer.BadParameter("When providing a configuration file, do not specify any other configuration attributes")
+        data = json.loads("".join([line for line in file_input]))
+    else:
+        data = {
+            "alias": alias,
+            "host": host,
+            "isDefault": is_default,
+            "organizationSlug": organization_slug,
+            "personalAccessToken": personal_access_token,
+            "username": username
+        }
+
+    # remove any data elements that are None - can only be is_default
+    data = {k: v for k, v in data.items() if v is not None}
+
+    r = client.post("api/v1/azure-devops/configuration", data=data)
+    print_json(data=r)
+
+@app.command()
+def add_multiple(
+    ctx: typer.Context,
+    file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="JSON file containing configurations; can be passed as stdin with -, example: -f-")] = None,
+):
+    """
+    Add multiple configurations
+    """
+
+    client = ctx.obj["client"]
+
+    data = json.loads("".join([line for line in file_input]))
+
+    r = client.put("api/v1/azure-devops/configurations", data=data)
+    print_json(data=r)
+
+@app.command()
+def delete(
+    ctx: typer.Context,
+    alias: str = typer.Option(..., "--alias", "-a", help="The alias of the
configuration"), +): + """ + Delete a configuration + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/azure-devops/configuration/" + alias) + print_json(data=r) + +@app.command() +def delete_all( + ctx: typer.Context, +): + """ + Delete all configurations + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/azure-devops/configurations") + print_json(data=r) + +@app.command() +def get( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Get a configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-devops/configuration/" + alias) + print_json(data=r) + +@app.command() +def get_all( + ctx: typer.Context, +): + """ + Get all configurations + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-devops/configurations") + print_json(data=r) + +@app.command() +def get_default( + ctx: typer.Context, +): + """ + Get default configuration + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/azure-devops/default-configuration") + print_json(data=r) + + +@app.command() +def update( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), + is_default: bool = typer.Option(False, "--is-default", "-i", help="If this is the default configuration"), +): + """ + Update a configuration + """ + + client = ctx.obj["client"] + + data = { + "alias": alias, + "isDefault": is_default + } + + r = client.put("api/v1/azure-devops/configuration/" + alias, data=data) + print_json(data=r) + +@app.command() +def validate( + ctx: typer.Context, + alias: str = typer.Option(..., "--alias", "-a", help="The alias of the configuration"), +): + """ + Validate a configuration + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-devops/configurations/validate" + alias) + print_json(data=r) + +@app.command() +def validate_all( + ctx: typer.Context, +): + """ + Validate all 
configurations + """ + + client = ctx.obj["client"] + + r = client.post("api/v1/azure-devops/configurations") + print_json(data=r) diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index 3c8a1ed..cf15272 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -2,8 +2,8 @@ from typing import Optional, List from typing_extensions import Annotated -from cortexapps_cli.command_options import ListCommandOptions -from cortexapps_cli.utils import print_output_with_context +from cortexapps_cli.command_options import ListCommandOptions, CommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output app = typer.Typer(help="Catalog commands", no_args_is_help=True) @@ -82,6 +82,7 @@ def catalog_list( columns: ListCommandOptions.columns = [], filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], + _print: CommandOptions._print = True, ): """ List entities in the catalog @@ -128,8 +129,11 @@ def catalog_list( # if page is specified, we want to fetch only that page r = client.get("api/v1/catalog", params=params) - data = r - print_output_with_context(ctx, data) + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) @app.command() def details( @@ -236,6 +240,7 @@ def descriptor( ctx: typer.Context, tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), yaml: bool = typer.Option(False, "--yaml", "-y", help="When true, returns the YAML representation of the descriptor."), + _print: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), ): """ Retrieve entity descriptor @@ -247,10 +252,17 @@ def descriptor( } r = client.get("api/v1/catalog/" + tag + "/openapi", params=params) - if yaml: - print(r) + if _print: + if yaml: + print(r) + else: + print_output_with_context(ctx, r) + #print(r) else: - 
print_output_with_context(ctx, r) + if yaml: + return(r) + else: + print_output_with_context(ctx, r) @app.command() def create( @@ -299,6 +311,7 @@ def list_descriptors( types: CatalogCommandOptions.types = None, page: ListCommandOptions.page = None, page_size: ListCommandOptions.page_size = 250, + _print: CommandOptions._print = True, ): """ List entity descriptors @@ -312,8 +325,9 @@ def list_descriptors( "page": page } - r = client.get("api/v1/catalog/descriptors", params=params) - print_output_with_context(ctx, r) + r = client.fetch_or_get("api/v1/catalog/descriptors", page, _print, params=params) + if not _print: + return(r) @app.command() def gitops_log( diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py index 42cb3fa..413b72b 100644 --- a/cortexapps_cli/commands/custom_data.py +++ b/cortexapps_cli/commands/custom_data.py @@ -1,6 +1,8 @@ import json import typer from typing_extensions import Annotated +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output from rich import print_json @@ -154,9 +156,16 @@ def get( @app.command() def list( ctx: typer.Context, - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + #page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + #page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: 
ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], ): """ List custom data for entity @@ -168,4 +177,23 @@ def list( "pageSize": page_size } - client.fetch_or_get("api/v1/catalog/" + tag + "/custom-data", page, params=params) + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Id=id", + "Key=key", + "Value=value", + "Date=dateUpdated", + "Source=source", + ] + + #client.fetch_or_get("api/v1/catalog/" + tag + "/custom-data", page, params=params) + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/" + tag + "/custom-data", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/" + tag + "/custom-data", params=params) + + data = r + print_output_with_context(ctx, data) diff --git a/cortexapps_cli/commands/custom_events.py b/cortexapps_cli/commands/custom_events.py index dba2862..890aeae 100644 --- a/cortexapps_cli/commands/custom_events.py +++ b/cortexapps_cli/commands/custom_events.py @@ -4,8 +4,14 @@ from rich import print_json import typer from typing_extensions import Annotated +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output -app = typer.Typer(help="Custom events commands", no_args_is_help=True) +app = typer.Typer( + help="Custom events commands", + no_args_is_help=True +) # Need a helper function to parse custom_data. 
# cannot do this in type: list[Tuple[str, str]] | None = typer.Option(None) @@ -155,11 +161,17 @@ def delete_all( @app.command() def list( ctx: typer.Context, - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), event: str = typer.Option(None, "--type", "-y", help="The type of the custom event, defaults to all."), timestamp: datetime = typer.Option(None, "--timestamp", "-ts", help="Optional timestamp of custom events to delete.", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), + _print: CommandOptions._print = True, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], ): """ List custom events for entity @@ -181,7 +193,31 @@ def list( if str(type(v)) == "": params[k] = v.strftime('%Y-%m-%dT%H:%M:%S') - client.fetch_or_get("api/v1/catalog/" + tag + "/custom-events", page, params=params) + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "UUID=uuid", + "title=title", + "Description=description", + "Url=url", + "Timestamp=timestamp", + "Type=type", + "customData=customData", + ] + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/" + tag + "/custom-events", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/" + tag + "/custom-events", params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) + + 
#client.fetch_or_get("api/v1/catalog/" + tag + "/custom-events", page, prt, params=params) @app.command() def get_by_uuid( diff --git a/cortexapps_cli/commands/custom_metrics.py b/cortexapps_cli/commands/custom_metrics.py index 9a1a508..4183cca 100644 --- a/cortexapps_cli/commands/custom_metrics.py +++ b/cortexapps_cli/commands/custom_metrics.py @@ -35,6 +35,7 @@ def get( end_date: datetime = typer.Option(None, "--end-date", "-s", help="End date for the filter (inclusive)", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + prt: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), ): """ List custom metrics data points for an entity @@ -54,7 +55,7 @@ def get( params = _convert_datetime_to_string(params) - client.fetch_or_get("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, page, params=params) + client.fetch_or_get("api/v1/eng-intel/custom-metrics/" + custom_metric_key + "/entity/" + tag, page, prt, params=params) @app.command() def add( diff --git a/cortexapps_cli/commands/dependencies.py b/cortexapps_cli/commands/dependencies.py index 5b9ccfd..e64736b 100644 --- a/cortexapps_cli/commands/dependencies.py +++ b/cortexapps_cli/commands/dependencies.py @@ -117,6 +117,7 @@ def get_all( include_outgoing: bool = typer.Option(False, "--include-outgoing", "-o", help="Include outgoing dependencies"), page: int = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), page_size: int = typer.Option(None, "--page-size", "-z", help="Page size for results"), + prt: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), ): """ Retrieve all dependencies for an entity @@ -134,7 +135,7 @@ def 
get_all( client = ctx.obj["client"] - client.fetch_or_get("api/v1/catalog/" + caller_tag + "/dependencies", page, params=params) + client.fetch_or_get("api/v1/catalog/" + caller_tag + "/dependencies", page, prt, params=params) @app.command() def delete( @@ -169,7 +170,7 @@ def get( path: str = typer.Option(None, "--path", "-p", help="The path of the dependency") ): """ - Retrieve dependency betweek entities + Retrieve dependency between entities """ params = { diff --git a/cortexapps_cli/commands/deploys.py b/cortexapps_cli/commands/deploys.py index fc3388e..e15173a 100644 --- a/cortexapps_cli/commands/deploys.py +++ b/cortexapps_cli/commands/deploys.py @@ -106,6 +106,7 @@ def deploys_list( tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + prt: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), ): """ List deployments for entity @@ -121,7 +122,7 @@ def deploys_list( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - client.fetch_or_get("api/v1/catalog/" + tag + "/deploys", page, params=params) + client.fetch_or_get("api/v1/catalog/" + tag + "/deploys", page, prt, params=params) @app.command() def add( @@ -136,7 +137,7 @@ def add( timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of the deploy", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), title: str = typer.Option(None, "--title", "-ti", help="The title of the deploy"), type: Type = typer.Option(None, "--type", "-ty", help="The type of the deploy"), - url: str = typer.Option(None, "--url", "-u", help="The Uniform Resource Locator(URL) of the deploy") + url: str = typer.Option(None, 
"--url", help="The Uniform Resource Locator(URL) of the deploy") ): """ Add deployment for entity @@ -176,7 +177,7 @@ def add( def delete_by_uuid( ctx: typer.Context, tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity"), - uuid: str = typer.Option(..., "--uuid", "-uu", help="The Universally Unique Identifier (UUID) of the deploy") + uuid: str = typer.Option(..., "--uuid", "-u", help="The Universally Unique Identifier (UUID) of the deploy") ): """ Delete deployment by uuid @@ -199,8 +200,8 @@ def update_by_uuid( timestamp: datetime = typer.Option(datetime.now(), "--timestamp", "-ts", help="Timestamp of the deploy", formats=["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"]), title: str = typer.Option(None, "--title", "-ti", help="The title of the deploy"), deploy_type: Type = typer.Option(None, "--type", "-ty", help="The type of the deploy"), - url: str = typer.Option(None, "--url", "-u", help="The Uniform Resource Locator(URL) of the deploy"), - uuid: str = typer.Option(..., "--uuid", "-uu", help="The Universally Unique Identifier (UUID) of the deploy") + url: str = typer.Option(None, "--url", help="The Uniform Resource Locator(URL) of the deploy"), + uuid: str = typer.Option(..., "--uuid", "-u", help="The Universally Unique Identifier (UUID) of the deploy") ): """ Update deployment for entity diff --git a/cortexapps_cli/commands/entity_types.py b/cortexapps_cli/commands/entity_types.py index 89d5cd6..7aec9bd 100644 --- a/cortexapps_cli/commands/entity_types.py +++ b/cortexapps_cli/commands/entity_types.py @@ -5,6 +5,9 @@ from rich import print_json import typer from typing_extensions import Annotated +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output app = typer.Typer(help="Entity Types commands", no_args_is_help=True) @@ -12,8 +15,14 @@ def list( ctx: 
typer.Context, include_built_in: bool = typer.Option(False, "--include-built-in", "-ib", help="When true, returns the built-in entity types that Cortex provides, such as rds and s3, defaults to false"), - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + _print: CommandOptions._print = True, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], ): """ List entity types, excludes Cortex default types of service, domain, and team @@ -29,13 +38,32 @@ def list( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - - client.fetch_or_get("api/v1/catalog/definitions", page, params=params) + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Type=type", + "Source=tag", + "Name=name", + "Description=description", + ] + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/definitions", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/definitions", params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) @app.command() def delete( ctx: typer.Context, - entity_type: str = typer.Option(..., "--type", "-ty", help="The entity type"), + entity_type: str = typer.Option(..., "--type", "-t", help="The entity type"), ): """ Delete entity type @@ -59,3 +87,17 @@ def create( r = client.post("api/v1/catalog/definitions/" + entity_type) print_json(data=r) + +@app.command() +def get( + ctx: 
typer.Context, + entity_type: str = typer.Option(..., "--type", "-t", help="The entity type"), +): + """ + Retrieve entity type + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/catalog/definitions/" + entity_type) + print_json(data=r) diff --git a/cortexapps_cli/commands/gitops_logs.py b/cortexapps_cli/commands/gitops_logs.py index b255ec4..acecf06 100644 --- a/cortexapps_cli/commands/gitops_logs.py +++ b/cortexapps_cli/commands/gitops_logs.py @@ -2,8 +2,13 @@ #import json #from rich import print_json import typer +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context -app = typer.Typer(help="GitOps Logs commands", no_args_is_help=True) +app = typer.Typer( + help="GitOps Logs commands", + no_args_is_help=True +) class Operation(str, Enum): ARCHIVED = "ARCHIVED" @@ -20,8 +25,15 @@ def get( sha: str = typer.Option(None, "--sha", "-s", help="Commit SHA"), operation: Operation = typer.Option(None, "--operation", "-o", help="One of CREATED, UPDATED, ARCHIVED, NO_CHANGE"), error_only: bool = typer.Option(False, "--error-only", "-eo", help="Only include entries with errors"), - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + #page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + #page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], ): """ Retrieve GitOps logs. 
API key must have the 'View GitOps logs' permission. @@ -42,5 +54,21 @@ def get( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Files=files", + "repositoryName=repository.repositoryName", + "provider=repository.provider", + "Commit=commit", + "Date=dateCreated", + ] - client.fetch_or_get("api/v1/gitops-logs", page, params=params) + #prt = True + #client.fetch_or_get("api/v1/gitops-logs", page, prt, params=params) + if page is None: + r = client.fetch("api/v1/gitops-logs", params=params) + else: + r = client.get("api/v1/gitops-logs", params=params) + + print_output_with_context(ctx, r) diff --git a/cortexapps_cli/commands/groups.py b/cortexapps_cli/commands/groups.py index 45c6f7b..0f1a4a7 100644 --- a/cortexapps_cli/commands/groups.py +++ b/cortexapps_cli/commands/groups.py @@ -10,6 +10,7 @@ def get( tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + prt: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), ): """ Get groups for entity. 
@@ -25,7 +26,7 @@ def get( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - client.fetch_or_get("api/v1/catalog/" + tag_or_id + "/groups", page, params=params) + client.fetch_or_get("api/v1/catalog/" + tag_or_id + "/groups", page, prt, params=params) @app.command() def add( diff --git a/cortexapps_cli/commands/ip_allowlist.py b/cortexapps_cli/commands/ip_allowlist.py index d692725..b15c617 100644 --- a/cortexapps_cli/commands/ip_allowlist.py +++ b/cortexapps_cli/commands/ip_allowlist.py @@ -1,4 +1,6 @@ import typer +from typing_extensions import Annotated +from cortexapps_cli.command_options import CommandOptions app = typer.Typer(help="IP Allowlist commands", no_args_is_help=True) @@ -7,6 +9,7 @@ def get( ctx: typer.Context, page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + _print: CommandOptions._print = True, ): """ Get allowlist of IP addresses & ranges @@ -22,12 +25,16 @@ def get( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - client.fetch_or_get("api/v1/ip-allowlist", page, params=params) + if _print: + client.fetch_or_get("api/v1/ip-allowlist", page, _print, params=params) + else: + return client.fetch_or_get("api/v1/ip-allowlist", page, _print, params=params) @app.command() def replace( ctx: typer.Context, addresses: str = typer.Option(..., "--address", "-a", help="Comma-delimited list of IP addresses and/or IP ranges of form ipAddress[:description], for example 127.0.0.1:'my local IP'"), + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None, force: bool = typer.Option(False, "--force", "-o", help="When true, entries will be updated even if the list doesn't contain the requestor's IP address") 
): """ @@ -36,9 +43,12 @@ def replace( client = ctx.obj["client"] - data = { - "entries": [{"address": x.split(':')[0], "description": None if len(x.split(':')) < 2 else x.split(':')[1]} for x in addresses.split(',')] - } + if file_input: + data = json.loads("".join([line for line in file_input])) + else: + data = { + "entries": [{"address": x.split(':')[0], "description": None if len(x.split(':')) < 2 else x.split(':')[1]} for x in addresses.split(',')] + } params = { "force": force, diff --git a/cortexapps_cli/commands/packages.py b/cortexapps_cli/commands/packages.py index dfac1bc..9090772 100644 --- a/cortexapps_cli/commands/packages.py +++ b/cortexapps_cli/commands/packages.py @@ -6,8 +6,14 @@ import cortexapps_cli.commands.packages_commands.python as python import cortexapps_cli.commands.packages_commands.node as node import cortexapps_cli.commands.packages_commands.nuget as nuget +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output + +app = typer.Typer( + help="Packages commands", + no_args_is_help=True +) -app = typer.Typer(help="Packages commands", no_args_is_help=True) app.add_typer(go.app, name="go") app.add_typer(java.app, name="java") app.add_typer(python.app, name="python") @@ -18,8 +24,13 @@ def list( ctx: typer.Context, tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: 
ListCommandOptions.sort = [], ): """ List packages for entity @@ -34,8 +45,24 @@ def list( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - - client.fetch_or_get("api/v1/catalog/" + tag_or_id + "/packages", page, params=params) + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Id=id", + "PackageType=packageType", + "Name=name", + "Version=version", + "DateCreated=dateCreated", + ] + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/catalog/" + tag_or_id + "/packages", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/catalog/" + tag_or_id + "/packages", params=params) + + print_output_with_context(ctx, r) @app.command() def delete_all( diff --git a/cortexapps_cli/commands/plugins.py b/cortexapps_cli/commands/plugins.py index 4aff647..6bafd88 100644 --- a/cortexapps_cli/commands/plugins.py +++ b/cortexapps_cli/commands/plugins.py @@ -1,16 +1,28 @@ -import json +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output from rich import print_json -import typer from typing_extensions import Annotated +import json +import typer -app = typer.Typer(help="Plugins commands", no_args_is_help=True) +app = typer.Typer( + help="Plugins commands", + no_args_is_help=True +) @app.command() def list( ctx: typer.Context, include_drafts: bool = typer.Option(False, "--include-drafts", "-i", help="Also include plugins that are in draft mode"), - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + _print: CommandOptions._print = True, + page: ListCommandOptions.page = None, + 
page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], ): """ Retrieve a list of all plugins, excluding drafts @@ -26,7 +38,33 @@ def list( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - client.fetch_or_get("api/v1/plugins", page, params=params) + #if _print: + # client.fetch_or_get("api/v1/plugins", page, _print, params=params) + #else: + # return client.fetch_or_get("api/v1/plugins", page, _print, params=params) + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Name=name", + "Tag=tag", + "Description=description", + ] + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/plugins", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/plugins", params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) @app.command() def create( @@ -57,16 +95,25 @@ def delete( @app.command() def get( ctx: typer.Context, - tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity.") + tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), + include_blob: bool = typer.Option(False, "--include-blob", "-i", help="When true, returns the plugin blob. 
Defaults to false."), + _print: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), ): """ Retrieve the metadata of a plugin by tag """ client = ctx.obj["client"] + + params = { + "includeBlob": include_blob, + } - r = client.get("api/v1/plugins/" + tag_or_id) - print_json(data=r) + r = client.get("api/v1/plugins/" + tag_or_id, params=params) + if _print: + print_json(data=r) + else: + return(r) @app.command() def replace( diff --git a/cortexapps_cli/commands/scorecards.py b/cortexapps_cli/commands/scorecards.py index 6f02c2c..a592239 100644 --- a/cortexapps_cli/commands/scorecards.py +++ b/cortexapps_cli/commands/scorecards.py @@ -2,10 +2,16 @@ from rich import print_json import typer from typing_extensions import Annotated +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output import cortexapps_cli.commands.scorecards_commands.exemptions as exemptions -app = typer.Typer(help="Scorecards commands", no_args_is_help=True) +app = typer.Typer( + help="Scorecards commands", + no_args_is_help=True +) app.add_typer(exemptions.app, name="exemptions") @app.command() @@ -46,8 +52,16 @@ def delete( def list( ctx: typer.Context, show_drafts: bool = typer.Option(False, "--show-drafts", "-s", help="Whether scorecard in draft mode should be included"), - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + #page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), + #page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + _print: CommandOptions._print = True, + page: ListCommandOptions.page = None, + page_size: 
ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], ): """ List scorecards @@ -56,13 +70,39 @@ def list( client = ctx.obj["client"] params = { + "page": page, + "pageSize": page_size, "showDrafts": show_drafts } # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - - client.fetch_or_get("api/v1/scorecards", page, params=params) + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Name=name", + "Tag=tag", + "Description=description", + "IsDraft=isDraft", + ] + + #if _print: + # client.fetch_or_get("api/v1/scorecards", page, _print, params=params) + #else: + # return client.fetch_or_get("api/v1/scorecards", page, _print, params=params) + + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/scorecards", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/scorecards", params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) @app.command() def shield( @@ -97,6 +137,7 @@ def get( def descriptor( ctx: typer.Context, scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"), + _print: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), ): """ Get scorecards YAML descriptor @@ -105,7 +146,10 @@ def descriptor( client = ctx.obj["client"] r = client.get("api/v1/scorecards/" + scorecard_tag + "/descriptor") - print(r) + if _print: + print(r) + else: + return(r) @app.command() def next_steps( @@ -133,6 +177,7 @@ def scores( scorecard_tag: str = typer.Option(..., "--scorecard-tag", "-s", help="Unique tag for the scorecard"), page: int = 
typer.Option(0, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + _print: bool = typer.Option(True, "--print", help="If result should be printed to the terminal", hidden=True), ): """ Return latest scores for all entities in the Scorecard @@ -149,5 +194,5 @@ def scores( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - client.fetch_or_get("api/v1/scorecards/" + scorecard_tag + "/scores", page, params=params) + client.fetch_or_get("api/v1/scorecards/" + scorecard_tag + "/scores", page, _print, params=params) diff --git a/cortexapps_cli/commands/workflows.py b/cortexapps_cli/commands/workflows.py index 8507954..f228850 100644 --- a/cortexapps_cli/commands/workflows.py +++ b/cortexapps_cli/commands/workflows.py @@ -1,17 +1,29 @@ -import typer -from cortexapps_cli.utils import print_output_with_context +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output from typing_extensions import Annotated import json +import typer +import yaml -app = typer.Typer(help="Workflows commands", no_args_is_help=True) +app = typer.Typer( + help="Workflows commands", + no_args_is_help=True +) @app.command() def list( ctx: typer.Context, include_actions: bool = typer.Option(False, "--include-actions", "-i", help="When true, returns the list of actions for each workflow. 
Defaults to false"), search_query: str = typer.Option(None, "--search-query", "-s", help="When set, only returns workflows with the given substring in the name or description"), - page: int | None = typer.Option(None, "--page", "-p", help="Page number to return, 0 indexed - omit to fetch all pages"), - page_size: int | None = typer.Option(None, "--page-size", "-z", help="Page size for results"), + _print: CommandOptions._print = True, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], ): """ Get users based on provided criteria. API key must have the View workflows permission @@ -26,16 +38,35 @@ def list( "pageSize": page_size } + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "Name=name", + "Tag=tag", + "Description=description", + ] + # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - r = client.get("api/v1/workflows", params=params) - print_output_with_context(ctx, r) + if page is None: + # if page is not specified, we want to fetch all pages + r = client.fetch("api/v1/workflows", params=params) + else: + # if page is specified, we want to fetch only that page + r = client.get("api/v1/workflows", params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) @app.command() def get( ctx: typer.Context, tag: str = typer.Option(..., "--tag", "-t", help="The tag or unique, auto-generated identifier for the workflow"), + yaml: bool = typer.Option(False, "--yaml", "-y", help="When true, returns the YAML representation of the descriptor."), + _print: CommandOptions._print = True, ): """ Retrieve workflow by tag or ID. API key must have the View workflows permission. 
@@ -43,8 +74,22 @@ def get( client = ctx.obj["client"] - r = client.get("api/v1/workflows/" + tag) - print_output_with_context(ctx, r) + if yaml: + headers={'Accept': 'application/yaml'} + else: + headers={'Accept': 'application/json'} + r = client.get("api/v1/workflows/" + tag, headers=headers) + + if _print: + if yaml: + print(r) + else: + print_output_with_context(ctx, r) + else: + if yaml: + return(r) + else: + print_output_with_context(ctx, r) @app.command() def delete( diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index 3cdf072..b7e1ed8 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -5,15 +5,21 @@ from rich import print_json from rich.markdown import Markdown from rich.console import Console +import logging import urllib.parse from cortexapps_cli.utils import guess_data_key + class CortexClient: - def __init__(self, api_key, base_url='https://api.getcortexapp.com'): + def __init__(self, api_key, tenant, numeric_level, base_url='https://api.getcortexapp.com'): self.api_key = api_key + self.tenant = tenant self.base_url = base_url + logging.basicConfig(level=numeric_level) + self.logger = logging.getLogger(__name__) + def request(self, method, endpoint, params={}, headers={}, data=None, raw_body=False, raw_response=False, content_type='application/json'): req_headers = { 'Authorization': f'Bearer {self.api_key}', @@ -29,6 +35,11 @@ def request(self, method, endpoint, params={}, headers={}, data=None, raw_body=F response = requests.request(method, url, params=params, headers=req_headers, data=req_data) + self.logger.debug(f"Request Headers: {response.request.headers}") + self.logger.debug(f"Response Status Code: {response.status_code}") + self.logger.debug(f"Response Headers: {response.headers}") + self.logger.debug(f"Response Content: {response.text}") + if not response.ok: try: # try to parse the error message @@ -56,8 +67,8 @@ def request(self, method, endpoint, params={}, headers={}, 
data=None, raw_body=F else: return None - def get(self, endpoint, params={}, headers={}, raw_response=False): - return self.request('GET', endpoint, params=params, headers=headers, raw_response=raw_response) + def get(self, endpoint, params={}, headers={}, raw_response=False, content_type='application/yaml'): + return self.request('GET', endpoint, params=params, headers=headers, raw_response=raw_response, content_type=content_type) def post(self, endpoint, data={}, params={}, headers={}, raw_body=False, raw_response=False, content_type='application/json'): return self.request('POST', endpoint, data=data, params=params, headers=headers, raw_body=raw_body, raw_response=raw_response, content_type=content_type) @@ -112,7 +123,7 @@ def fetch(self, endpoint, params={}, headers={}): data_key: data, } - def fetch_or_get(self, endpoint, page, params={}): + def fetch_or_get(self, endpoint, page, prt, params={}): if page is None: # if page is not specified, we want to fetch all pages r = self.fetch(endpoint, params=params) @@ -120,7 +131,11 @@ def fetch_or_get(self, endpoint, page, params={}): # if page is specified, we want to fetch only that page r = self.get(endpoint, params=params) - print_json(data=r) + if prt: + print_json(data=r) + else: + return(r) + def get_entity(self, entity_tag: str, entity_type: str = ''): match entity_type.lower(): diff --git a/poetry.lock b/poetry.lock index 6890b5e..3ec3983 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,124 +1,129 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "certifi" -version = "2024.7.4" +version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, ] [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7" +groups = ["main", "dev"] files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash 
= "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - 
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = 
"charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = 
"sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = 
"charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = 
"charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + 
{file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + 
{file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = 
"charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = 
"click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -130,74 +135,92 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" -version = "7.5.4" +version = "7.8.2" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["test"] files = [ - {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, - {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, - {file = 
"coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, - {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, - {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, - {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, - {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, - {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, - {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, - {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, - {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, - {file = 
"coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, - {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, - {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, - {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, - {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, + {file = "coverage-7.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a"}, + {file = "coverage-7.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be"}, + {file = "coverage-7.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3"}, + {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6"}, + {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622"}, + {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c"}, + {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3"}, + {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404"}, + {file = "coverage-7.8.2-cp310-cp310-win32.whl", hash = "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7"}, + {file = "coverage-7.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347"}, + {file = "coverage-7.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9"}, + {file = "coverage-7.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879"}, + {file = "coverage-7.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a"}, + {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5"}, + {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11"}, + {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a"}, + {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb"}, + {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54"}, + {file = "coverage-7.8.2-cp311-cp311-win32.whl", hash = "sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a"}, + {file = "coverage-7.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975"}, + {file = "coverage-7.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53"}, + {file = "coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c"}, + {file = "coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1"}, + {file = "coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279"}, + {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99"}, + {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20"}, + {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2"}, + {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57"}, + {file = 
"coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f"}, + {file = "coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8"}, + {file = "coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223"}, + {file = "coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f"}, + {file = "coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca"}, + {file = "coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d"}, + {file = "coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85"}, + {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257"}, + {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108"}, + {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0"}, + {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050"}, + {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48"}, + {file = "coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7"}, + 
{file = "coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3"}, + {file = "coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7"}, + {file = "coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008"}, + {file = "coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36"}, + {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46"}, + {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be"}, + {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740"}, + {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625"}, + {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b"}, + {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199"}, + {file = "coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8"}, + {file = "coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d"}, + {file = "coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = 
"sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b"}, + {file = "coverage-7.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a"}, + {file = "coverage-7.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d"}, + {file = "coverage-7.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca"}, + {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d"}, + {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787"}, + {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7"}, + {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3"}, + {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7"}, + {file = "coverage-7.8.2-cp39-cp39-win32.whl", hash = "sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a"}, + {file = "coverage-7.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e"}, + {file = "coverage-7.8.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837"}, + {file = "coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32"}, + {file = "coverage-7.8.2.tar.gz", hash = 
"sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27"}, ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "execnet" @@ -205,6 +228,7 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -215,32 +239,56 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" +groups = ["main", "dev"] files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" -version = "2.0.0" +version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false +python-versions = ">=3.8" +groups = ["dev", "test"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "jinja2" +version = "3.1.6" 
+description = "A very fast and expressive template engine." +optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + [[package]] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -259,12 +307,84 @@ profiling = ["gprof2dot"] rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + [[package]] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = 
"sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -272,39 +392,42 @@ files = [ [[package]] name = "packaging" -version = "24.1" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["dev", "test"] files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev", "test"] files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "pygments" -version = "2.18.0" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -312,20 +435,21 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "8.2.2" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev", "test"] files = [ - {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, - {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.5,<2.0" +pluggy = ">=1.5,<2" [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -336,6 +460,7 @@ version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, @@ -348,15 +473,55 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] +[[package]] +name = "pytest-html" +version = "4.1.1" +description = "pytest plugin for generating HTML reports" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_html-4.1.1-py3-none-any.whl", hash = "sha256:c8152cea03bd4e9bee6d525573b67bbc6622967b72b9628dda0ea3e2a0b5dd71"}, + {file = "pytest_html-4.1.1.tar.gz", hash = "sha256:70a01e8ae5800f4a074b56a4cb1025c8f4f9b038bba5fe31e3c98eb996686f07"}, +] + +[package.dependencies] +jinja2 = ">=3.0.0" +pytest = ">=7.0.0" +pytest-metadata = ">=2.0.0" + +[package.extras] +docs = ["pip-tools (>=6.13.0)"] +test = ["assertpy (>=1.1)", "beautifulsoup4 (>=4.11.1)", "black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "pytest-mock (>=3.7.0)", "pytest-rerunfailures (>=11.1.2)", "pytest-xdist (>=2.4.0)", "selenium (>=4.3.0)", "tox (>=3.24.5)"] + +[[package]] +name = "pytest-metadata" +version = "3.1.1" +description = "pytest plugin for test session metadata" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_metadata-3.1.1-py3-none-any.whl", hash = "sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b"}, + {file = "pytest_metadata-3.1.1.tar.gz", hash = "sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +test = ["black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "tox (>=3.24.5)"] + [[package]] name = "pytest-xdist" -version = "3.6.1" +version = "3.7.0" description = "pytest 
xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, - {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, + {file = "pytest_xdist-3.7.0-py3-none-any.whl", hash = "sha256:7d3fbd255998265052435eb9daa4e99b62e6fb9cfb6efd1f858d4d8c0c7f0ca0"}, + {file = "pytest_xdist-3.7.0.tar.gz", hash = "sha256:f9248c99a7c15b7d2f90715df93610353a485827bc06eefb6566d23f6400f126"}, ] [package.dependencies] @@ -370,62 +535,65 @@ testing = ["filelock"] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -434,6 +602,7 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -451,13 +620,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "responses" -version = "0.25.3" +version = "0.25.7" description = "A utility library for mocking out the `requests` Python library." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, - {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, + {file = "responses-0.25.7-py3-none-any.whl", hash = "sha256:92ca17416c90fe6b35921f52179bff29332076bb32694c0df02dcac2c6bc043c"}, + {file = "responses-0.25.7.tar.gz", hash = "sha256:8ebae11405d7a5df79ab6fd54277f6f2bc29b2d002d0dd2d5c632594d1ddcedb"}, ] [package.dependencies] @@ -466,17 +636,18 @@ requests = ">=2.30.0,<3.0" urllib3 = ">=1.25.10,<3.0" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] [[package]] name = "rich" -version = "13.8.1" +version = "14.0.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" +groups = ["main"] files = [ - {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, - {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, + {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, + {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, ] [package.dependencies] @@ -492,6 +663,7 @@ version = "1.5.4" description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" +groups = 
["main"] files = [ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, @@ -503,6 +675,7 @@ version = "0.12.5" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"}, {file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"}, @@ -516,33 +689,35 @@ typing-extensions = ">=3.7.4.3" [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.11" -content-hash = "11dc2395310eafb764eefef1ed8366521219265f9820bb2c19be9ad210137ef0" +content-hash = "e527a86daab17d16ce5cc60269f55b4490d3fbf8663400acba9309e0f6e90daf" diff --git a/pyproject.toml b/pyproject.toml index 5e4b047..594e942 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ requests = ">= 2.32.3, < 3" pyyaml = ">= 6.0.1, < 7" urllib3 = ">= 2.2.2" typer = "^0.12.5" +click = "<8.2" [tool.poetry.scripts] cortex = "cortexapps_cli.cortex:cli" @@ -34,6 +35,7 @@ pytest-cov = "^5.0.0" [tool.poetry.group.dev.dependencies] pytest-xdist = "^3.6.1" responses = "^0.25.3" +pytest-html = "^4.1.1" [build-system] requires = ["poetry-core"] diff --git a/tests/plugins.json b/tests/plugins.json new file mode 100644 index 0000000..7996fcd --- /dev/null +++ b/tests/plugins.json @@ -0,0 +1,298 @@ +{ + "total": 10, + "page": 0, + "totalPages": 1, + "plugins": [ + { + "tag": "github-deploys", + "name": "GitHub Releases", + "description": null, + "contexts": [ + { + "entityFilter": { + 
"typeFilter": { + "include": true, + "types": [ + "service" + ] + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": { + "id": 2800, + "tenantId": 1, + "oauthUserId": "google-oauth2|105763048987093518818", + "name": "David Barnes", + "email": "david.barnes@cortex.io", + "dateCreated": "2022-05-16T16:38:00.288436", + "lastLogin": null + }, + "proxyTag": "github-releases", + "lastUpdated": "2023-06-08T16:17:56.433869" + }, + { + "tag": "techradar", + "name": "TechRadar", + "description": null, + "contexts": [ + { + "type": "GLOBAL" + } + ], + "minimumRoleRequired": "USER", + "isDraft": false, + "createdBy": { + "id": 2351, + "tenantId": 1, + "oauthUserId": "google-oauth2|100446023106570979548", + "name": "Mike Moore", + "email": "mike.moore@cortex.io", + "dateCreated": "2022-03-21T00:08:56.574619", + "lastLogin": null + }, + "proxyTag": null, + "lastUpdated": "2023-10-08T20:11:16.702748" + }, + { + "tag": "cli-installation-and-configuration", + "name": "Installation and Configuration", + "description": "Demonstrates how to install the Cortex Command Line Interface (CLI) from MacOS homebrew.", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": true, + "types": [ + "service" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'cli'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2024-06-05T20:18:41.825891" + }, + { + "tag": "project-workflows-create-jira-ticket", + "name": "Create Jira Ticket", + "description": "Showcases creating a Jira ticket using Cortex Workflows. 
Please don't share externally.", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + "domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'project-workflows'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2024-06-08T17:28:16.397023" + }, + { + "tag": "xero-go-live-6-18", + "name": "💪 Xero Go-Live 6-18", + "description": null, + "contexts": [ + { + "type": "GLOBAL" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": { + "id": 9320, + "tenantId": 1, + "oauthUserId": "google-oauth2|115853682294872999618", + "name": "Jeff Schnitter", + "email": "jeff.schnitter@cortex.io", + "dateCreated": "2023-07-24T15:08:04.185523", + "lastLogin": "2025-01-08T23:39:41.901175" + }, + "proxyTag": null, + "lastUpdated": "2024-06-14T22:08:13.791927" + }, + { + "tag": "jeff-schnitter-i-melt-with-you", + "name": "Favorite Song", + "description": "I Melt With You", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + "domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'jeff-schnitter'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2024-09-25T17:00:33.607194" + }, + { + "tag": "hanna-vigil-the-black-dog", + "name": "Favorite Song", + "description": "The Black Dog", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + "domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'hanna-vigil'", + "category": "Generic", + "cqlVersion": "2.0", + "type": 
"CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2024-09-30T17:22:20.402398" + }, + { + "tag": "plugin-marketplace", + "name": "Plugin Marketplace", + "description": null, + "contexts": [ + { + "type": "GLOBAL" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": { + "id": 25205, + "tenantId": 1, + "oauthUserId": "google-oauth2|110738514566936908176", + "name": "Martin Stone", + "email": "martin.stone@cortex.io", + "dateCreated": "2024-08-05T16:41:18.839233", + "lastLogin": "2025-01-06T17:33:21.527241" + }, + "proxyTag": null, + "lastUpdated": "2024-11-27T23:13:49.785416" + }, + { + "tag": "aaron-wirick-surf-wax-america", + "name": "Favorite Song", + "description": "Surf Wax America", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + "domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'aaron-wirick'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2024-12-06T16:45:45.378944" + }, + { + "tag": "doug-cooper-jealous-(labrinth)", + "name": "Favorite Song", + "description": "Jealous (labrinth)", + "contexts": [ + { + "entityFilter": { + "typeFilter": { + "include": false, + "types": [ + "service", + "domain", + "team" + ] + }, + "cqlFilter": { + "query": "entity.tag() == 'doug-cooper'", + "category": "Generic", + "cqlVersion": "2.0", + "type": "CQL_FILTER" + }, + "type": "COMPOUND_FILTER" + }, + "type": "ENTITY" + } + ], + "minimumRoleRequired": "VIEWER", + "isDraft": false, + "createdBy": null, + "proxyTag": null, + "lastUpdated": "2025-01-08T19:59:46.187193" + } + ] +} diff --git a/tests/test_api_keys.py 
b/tests/test_api_keys.py new file mode 100644 index 0000000..ae58fc2 --- /dev/null +++ b/tests/test_api_keys.py @@ -0,0 +1,13 @@ +from tests.helpers.utils import * + +def test(): + cli(["api-keys", "create", "-d", "Key created from CLI test", "-n", "CLI Test Key", "-dr", "USER"]) + + response = cli(["api-keys", "list"]) + assert any(key['description'] == 'Key created from CLI test' for key in response['apiKeys']), "Should find key with description 'Key created from CLI test'" + + cid = [key['cid'] for key in response['apiKeys'] if key['description'] == 'Key created from CLI test'][0] + print("cid = " + cid) + response = cli(["api-keys", "get", "-c", cid]) + cli(["api-keys", "update", "-c", cid, "-n", "My new name", "-d", "Update: Key created from CLI test"]) + cli(["api-keys", "delete", "-c", cid]) diff --git a/tests/test_audit_logs.py b/tests/test_audit_logs.py index 4ea6c7c..54ce8b0 100644 --- a/tests/test_audit_logs.py +++ b/tests/test_audit_logs.py @@ -1,6 +1,6 @@ from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): result = cli(["audit-logs", "get"]) assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_dates.py b/tests/test_audit_logs_dates.py index 8adcfe3..d3a5301 100644 --- a/tests/test_audit_logs_dates.py +++ b/tests/test_audit_logs_dates.py @@ -1,6 +1,6 @@ from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): end_date = today() start_date = yesterday() diff --git a/tests/test_audit_logs_end_date.py b/tests/test_audit_logs_end_date.py index e4b2eba..d1c6f9d 100644 --- a/tests/test_audit_logs_end_date.py +++ b/tests/test_audit_logs_end_date.py @@ -1,6 +1,6 @@ from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") 
+#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): end_date = today() result = cli(["audit-logs", "get", "-e", end_date]) diff --git a/tests/test_audit_logs_page.py b/tests/test_audit_logs_page.py index 5fbc317..de69c58 100644 --- a/tests/test_audit_logs_page.py +++ b/tests/test_audit_logs_page.py @@ -1,6 +1,6 @@ from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): result = cli(["audit-logs", "get", "-p", "0"]) assert (len(result['logs']) > 0) diff --git a/tests/test_audit_logs_size.py b/tests/test_audit_logs_size.py index c6f0374..1768d11 100644 --- a/tests/test_audit_logs_size.py +++ b/tests/test_audit_logs_size.py @@ -1,6 +1,6 @@ from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): result = cli(["audit-logs", "get", "-p", "0", "-z", "1"]) assert (len(result['logs']) == 1) diff --git a/tests/test_audit_logs_start_date.py b/tests/test_audit_logs_start_date.py index 1434a28..844ceb8 100644 --- a/tests/test_audit_logs_start_date.py +++ b/tests/test_audit_logs_start_date.py @@ -1,6 +1,6 @@ from tests.helpers.utils import * -@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") +#@pytest.mark.skip(reason="Disabled until CET-15982 is resolved.") def test(): start_date = yesterday() result = cli(["audit-logs", "get", "-s", start_date]) diff --git a/tests/test_catalog_create_entity.py b/tests/test_catalog_create_entity.py index c75b180..1858481 100644 --- a/tests/test_catalog_create_entity.py +++ b/tests/test_catalog_create_entity.py @@ -4,4 +4,5 @@ def test(): cli(["catalog", "create", "-f", "data/run-time/create-entity.yaml"]) response = cli(["catalog", "descriptor", "-t", "create-entity"]) + print(str(response)) assert response['info']['x-cortex-tag'] == "create-entity" diff 
--git a/tests/test_catalog_list_entity_descriptors_page_size.py b/tests/test_catalog_list_entity_descriptors_page_size.py index 874a0ab..a0ffd67 100644 --- a/tests/test_catalog_list_entity_descriptors_page_size.py +++ b/tests/test_catalog_list_entity_descriptors_page_size.py @@ -1,5 +1,5 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list-descriptors", "-t", "component", "-z", "1"]) + response = cli(["catalog", "list-descriptors", "-t", "component", "-p", "0", "-z", "1"]) assert (len(response['descriptors']) == 1) diff --git a/tests/test_catalog_list_entity_descriptors_yaml.py b/tests/test_catalog_list_entity_descriptors_yaml.py index 81050e8..2e18279 100644 --- a/tests/test_catalog_list_entity_descriptors_yaml.py +++ b/tests/test_catalog_list_entity_descriptors_yaml.py @@ -1,6 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list-descriptors", "-y", "-t", "component"]) + response = cli(["catalog", "list-descriptors", "-y", "--types", "component"]) list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "backend-worker"] + print("list = " + str(list)) assert list[0]['info']['x-cortex-custom-metadata']['cicd'] == "circle-ci" diff --git a/tests/test_config_file.py b/tests/test_config_file.py new file mode 100644 index 0000000..27682a4 --- /dev/null +++ b/tests/test_config_file.py @@ -0,0 +1,97 @@ +""" +Tests for the cortex CLI config file +""" + +# These tests are all marked to run in serial order because they make modifications to the +# cortex config file and/or CORTEX_API_KEY value and would potentially impact other tests +# that are running in parallel (with poetry run pytest -n auto), so they are run separately. + +# Additionally, order is VERY IMPORTANT in this file because of the way CORTEX_API key is +# deleted, set to invalid values, etc. Moving test order could impact the overall success +# of pytest. Tread carefully here. 
+from cortexapps_cli.cortex import cli + +import io +import os +import pytest +import sys +from string import Template + +# Requires user input, so use monkeypatch to set it. +@pytest.fixture(scope="session") +def delete_cortex_api_key(): + if "CORTEX_API_KEY" in os.environ: + del os.environ['CORTEX_API_KEY'] + +@pytest.mark.serial +def test_config_file_api_key_quotes(tmp_path): + cortex_api_key = os.getenv('CORTEX_API_KEY') + f = tmp_path / "cortex_config_api_key_quotes" + template = Template(""" + [default] + api_key = "${cortex_api_key}" + """) + content = template.substitute(cortex_api_key=cortex_api_key) + print(content) + f.write_text(content) + cli(["-c", str(f), "teams", "list"]) + +@pytest.mark.serial +def test_environment_variables(capsys): + cli(["catalog", "list"]) + out, err = capsys.readouterr() + print("ERR = " + err) + assert err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY", "Warning should be displayed by default" + + cli(["-q", "catalog", "list"]) + out, err = capsys.readouterr() + assert not(err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY"), "Warning should be displayed with -q option" + +@pytest.mark.serial +def test_config_file_create(monkeypatch, tmp_path, delete_cortex_api_key): + with pytest.raises(SystemExit) as excinfo: + monkeypatch.setattr('sys.stdin', io.StringIO('Y')) + f = tmp_path / "test-config.txt" + cli(["-c", str(f), "catalog", "list"]) + +@pytest.mark.serial +def test_config_file_new(tmp_path, capsys, delete_cortex_api_key): + f = tmp_path / "cortex_config" + content = """ + [default] + api_key = REPLACE_WITH_YOUR_CORTEX_API_KEY + """ + f.write_text(content) + with pytest.raises(SystemExit) as excinfo: + cli(["-c", str(f), "teams", "list"]) + out, err = capsys.readouterr() + +@pytest.mark.serial +def test_export(capsys, delete_cortex_api_key): + cli(["-t", "rich-sandbox", "backup", "export"]) + out, err = capsys.readouterr() + last_line = out.strip().split("\n")[-1] + 
sys.stdout.write(out + "\n\n") + sys.stdout.write(last_line + "\n\n") + assert "rich-sandbox" in out + +@pytest.mark.serial +def test_config_file_bad_api_key(tmp_path, capsys, delete_cortex_api_key): + f = tmp_path / "cortex_config_bad_api_key" + content = """ + [default] + api_key = invalidApiKey + """ + f.write_text(content) + with pytest.raises(SystemExit) as excinfo: + cli(["-c", str(f), "teams", "list"]) + out, err = capsys.readouterr() + assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" + +@pytest.mark.serial +def test_environment_variable_invalid_key(capsys): + with pytest.raises(SystemExit) as excinfo: + os.environ["CORTEX_API_KEY"] = "invalidKey" + cli(["teams", "list"]) + out, err = capsys.readouterr() + assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" diff --git a/tests/test_custom_metrics.py b/tests/test_custom_metrics.py index c7f6203..7f77bf8 100644 --- a/tests/test_custom_metrics.py +++ b/tests/test_custom_metrics.py @@ -1,12 +1,16 @@ from tests.helpers.utils import * +# As part of this testing, filed: +# CET-19691: custom metrics POST API returns 200 response for un-processed metrics older than 6 months def test(): + date = today() + print("date = " + str(date)) cli(["custom-metrics", "delete", "-t", "shipping-integrations", "-k", "vulnerabilities", "-s", "2022-01-01T00:00:00", "-e", today()]) cli(["custom-metrics", "add", "-t", "shipping-integrations", "-k", "vulnerabilities", "-v", "3.0"]) result = cli(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) assert result['data'][0]['value'] == 3.0, "should have single value of 3.0" - cli(["custom-metrics", "add-in-bulk", "-t", "shipping-integrations", "-k", "vulnerabilities", "-v", "2024-07-01T00:00:00=1.0", "-v", "2024-08-01T00:00:00=2.0"]) + cli(["custom-metrics", "add-in-bulk", "-t", "shipping-integrations", "-k", "vulnerabilities", "-v", f"{date}=1.0", "-v", 
f"{date}=2.0"]) result = cli(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) assert result['total'] == 3, "should have total of 3 metrics data points" print("There is not a good way to test this today because there is a pre-requisite that the custom metric already exists.") diff --git a/tests/test_deploys.py b/tests/test_deploys.py index db3136c..3d0ddfa 100644 --- a/tests/test_deploys.py +++ b/tests/test_deploys.py @@ -14,17 +14,19 @@ def test_deploys(): result = cli(["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys-uuid.json"]) uuid = result['uuid'] + print("uuid = " + uuid) + _add_deploy() result = cli(["deploys", "list", "-t", "shipping-integrations"]) assert any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should find a deploy with uuid" assert result['total'] == 2, "Two deploys should be returned for entity" - cli(["deploys", "update-by-uuid", "-t", "shipping-integrations", "-uu", uuid, "-f", "data/run-time/deploys-update.json"]) + cli(["deploys", "update-by-uuid", "-t", "shipping-integrations", "-u", uuid, "-f", "data/run-time/deploys-update.json"]) result = cli(["deploys", "list", "-t", "shipping-integrations"]) deploy = [deploy for deploy in result['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-456789", "Should find a deploy with sha" - cli(["deploys", "delete-by-uuid", "-t", "shipping-integrations", "-uu", uuid]) + cli(["deploys", "delete-by-uuid", "-t", "shipping-integrations", "-u", uuid]) result = cli(["deploys", "list", "-t", "shipping-integrations"]) assert not any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should not find a deploy with uuid" assert result['total'] == 1, "Following delete-by-uuid, only one deploy should be returned for entity" diff --git a/tests/test_scim.py b/tests/test_scim.py index 2f96ae7..46b4c0f 100644 --- a/tests/test_scim.py +++ b/tests/test_scim.py @@ -11,7 +11,7 @@ def test(): id = 
response['Resources'][0]['id'] response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me", "-a", "name.familyName"]) - assert response['Resources'][0]['name']['familyName'] == 'Schnitter', "Should find family Name" + assert 'familyName' in response['Resources'][0]['name'].keys(), "Should find familyName in response" response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me", "-e", "name.familyName"]) assert 'familyName' not in response['Resources'][0]['name'].keys(), "Should not have familyName in response" diff --git a/tests/test_scorecards.py b/tests/test_scorecards.py index a50421f..9431b36 100644 --- a/tests/test_scorecards.py +++ b/tests/test_scorecards.py @@ -73,6 +73,7 @@ def test_scorecards_drafts(): @mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) def test_exemption_that_will_be_approved(): rule_id = _get_rule("Has Custom Data") + print("rule_id = " + rule_id) response = cli(["scorecards", "exemptions", "request", "-s", "test-scorecard", "-t", "test-service", "-r", "test approve", "-ri", rule_id, "-d", "100"]) assert response['exemptionStatus']['status'] == 'PENDING', "exemption state should be PENDING" @@ -80,6 +81,7 @@ def test_exemption_that_will_be_approved(): @pytest.mark.usefixtures('test_exemption_that_will_be_approved') def test_approve_exemption(): rule_id = _get_rule("Has Custom Data") + print("rule_id = " + rule_id) response = cli(["scorecards", "exemptions", "approve", "-s", "test-scorecard", "-t", "test-service", "-ri", rule_id]) assert response['exemptions'][0]['exemptionStatus']['status'] == 'APPROVED', "exemption state should be APPROVED" @@ -90,6 +92,7 @@ def test_approve_exemption(): @mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) def test_exemption_that_will_be_denied(): rule_id = _get_rule("Is Definitely False") + print("rule_id = " + rule_id) response = cli(["scorecards", "exemptions", "request", "-s", 
"test-scorecard", "-t", "test-service", "-r", "test deny", "-ri", rule_id, "-d", "100"]) assert response['exemptionStatus']['status'] == 'PENDING', "exemption state should be PENDING" @@ -97,6 +100,7 @@ def test_exemption_that_will_be_denied(): @pytest.mark.usefixtures('test_exemption_that_will_be_denied') def test_deny_exemption(): rule_id = _get_rule("Is Definitely False") + print("rule_id = " + rule_id) response = cli(["scorecards", "exemptions", "deny", "-s", "test-scorecard", "-t", "test-service", "-r", "I deny, therefore I am", "-ri", rule_id]) assert response['exemptions'][0]['exemptionStatus']['status'] == 'REJECTED', "exemption state should be REJECTED" diff --git a/tests/test_stdin.py b/tests/test_stdin.py new file mode 100644 index 0000000..d1d0e03 --- /dev/null +++ b/tests/test_stdin.py @@ -0,0 +1,11 @@ +""" +Tests for stdin input. +""" +import subprocess + +def test_stdin_input(capsys): + cat_process = subprocess.Popen(['cat', 'data/run-time/create-entity.yaml'], stdout=subprocess.PIPE) + cortex_process = subprocess.Popen(['cortexapps_cli/cortex.py', 'catalog', 'create','-f-'],stdin=cat_process.stdout, stdout=subprocess.PIPE) + out, err = cortex_process.communicate() + rc=cortex_process.wait() + assert rc == 0, "catalog test with stdin should succeed" From 082b0591f123c235ce6c20e4e1d1b3057fd818fa Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Thu, 5 Jun 2025 19:03:57 -0700 Subject: [PATCH 48/56] Remove need to load data before tests. 
--- .github/CODEOWNERS | 2 +- HISTORY.md | 20 ++- Justfile | 64 ++++----- cortexapps_cli/.cortex.py.swp | Bin 0 -> 16384 bytes cortexapps_cli/cli.py | 2 + cortexapps_cli/commands/backup.py | 63 ++++++--- cortexapps_cli/commands/catalog.py | 1 - cortexapps_cli/commands/entity_types.py | 31 ++++- cortexapps_cli/commands/groups.py | 39 ++++-- cortexapps_cli/commands/initiatives.py | 112 +++++++++++++++ .../commands/integrations_commands/aws.py | 4 +- .../integrations_commands/azure_devops.py | 2 +- .../integrations_commands/azure_resources.py | 4 +- .../integrations_commands/circleci.py | 2 +- .../integrations_commands/coralogix.py | 2 +- .../commands/integrations_commands/datadog.py | 2 +- .../commands/integrations_commands/github.py | 2 +- .../commands/integrations_commands/gitlab.py | 2 +- .../integrations_commands/incidentio.py | 2 +- .../integrations_commands/launchdarkly.py | 2 +- .../integrations_commands/newrelic.py | 2 +- .../integrations_commands/pagerduty.py | 2 +- .../integrations_commands/prometheus.py | 2 +- .../integrations_commands/sonarqube.py | 2 +- cortexapps_cli/commands/packages.py | 19 +-- cortexapps_cli/commands/scorecards.py | 4 +- data/catalog/ach-payments-nacha.yaml | 47 ------- data/catalog/admin-customer-support.yaml | 40 ------ data/catalog/alerting-stock-service.yaml | 39 ------ data/catalog/api-australia.yaml | 17 --- data/catalog/api-back-profiles.yaml | 17 --- data/catalog/api-bi-systems.yaml | 17 --- data/catalog/api-business-analytics.yaml | 17 --- data/catalog/api-business-development.yaml | 17 --- data/catalog/api-business-innovation.yaml | 17 --- data/catalog/api-business-model.yaml | 17 --- data/catalog/api-business-operations.yaml | 17 --- data/catalog/api-business-plan.yaml | 17 --- data/catalog/api-business-process.yaml | 17 --- data/catalog/api-business-systems.yaml | 17 --- data/catalog/api-business-technology.yaml | 17 --- data/catalog/api-business-transformation.yaml | 17 --- data/catalog/api-caribbean.yaml | 17 --- 
data/catalog/api-central-america.yaml | 17 --- data/catalog/api-circle-cicd.yaml | 17 --- data/catalog/api-code-search.yaml | 17 --- data/catalog/api-cold-storage.yaml | 17 --- data/catalog/api-credit-card.yaml | 17 --- data/catalog/api-datacenter-monitors.yaml | 17 --- data/catalog/api-design-file-updates.yaml | 17 --- data/catalog/api-dev-pros.yaml | 17 --- data/catalog/api-europe.yaml | 17 --- data/catalog/api-feature-services.yaml | 17 --- .../api-infrastructure-management.yaml | 17 --- .../api-infrastructure-monitoring.yaml | 17 --- data/catalog/api-infrastructure-planning.yaml | 17 --- data/catalog/api-infrastructure-security.yaml | 17 --- data/catalog/api-infrastructure-services.yaml | 17 --- data/catalog/api-infrastructure-strategy.yaml | 17 --- data/catalog/api-na-west.yaml | 17 --- data/catalog/api-network-telemetry.yaml | 17 --- data/catalog/api-networking-hardware.yaml | 17 --- .../api-networking-infrastructure.yaml | 17 --- data/catalog/api-networking-protocol.yaml | 17 --- data/catalog/api-networking-services.yaml | 17 --- data/catalog/api-networking-software.yaml | 17 --- data/catalog/api-networking-solutions.yaml | 17 --- data/catalog/api-oceania.yaml | 17 --- data/catalog/api-polar-regions.yaml | 17 --- data/catalog/api-premium-logins.yaml | 17 --- data/catalog/api-profile-integrations.yaml | 17 --- data/catalog/api-quarterly-reports.yaml | 17 --- data/catalog/api-security-health-status.yaml | 17 --- data/catalog/api-south-america-region.yaml | 17 --- .../catalog/api-standards-and-compliance.yaml | 17 --- data/catalog/api-transactions-api.yaml | 17 --- data/catalog/api-user-services.yaml | 17 --- data/catalog/auth-team.yaml | 27 ---- data/catalog/authentication.yaml | 19 --- data/catalog/autocomplete-parser.yaml | 38 ----- data/catalog/autocomplete.yaml | 41 ------ data/catalog/checkout.yaml | 18 --- .../credit-card-transaction-processor.yaml | 39 ------ data/catalog/data-calculation-batch-job.yaml | 39 ------ data/catalog/data-ingestion.yaml | 23 
---- data/catalog/data.yaml | 31 ----- data/catalog/engineering-dev-cluster.yaml | 17 --- data/catalog/experience.yaml | 26 ---- data/catalog/feed-calculator-job.yaml | 38 ----- data/catalog/fraud-analyzer.yaml | 39 ------ data/catalog/identity.yaml | 19 --- data/catalog/image-recognition-pipeline.yaml | 39 ------ data/catalog/image-store-bucket.yaml | 21 --- data/catalog/import-engine.yaml | 17 --- data/catalog/inventory-scraper.yaml | 39 ------ data/catalog/inventory-team.yaml | 33 ----- data/catalog/inventory.yaml | 20 --- data/catalog/logistics-team.yaml | 24 ---- data/catalog/model-innovation-team.yaml | 27 ---- data/catalog/new-item-fanout-service.yaml | 42 ------ data/catalog/oauth2-identity-service.yaml | 39 ------ data/catalog/order-management.yaml | 19 --- data/catalog/orders-events.yaml | 20 --- data/catalog/packaging-recommender.yaml | 39 ------ data/catalog/payments-team.yaml | 17 --- data/catalog/payments.yaml | 22 --- data/catalog/profile-management.yaml | 39 ------ data/catalog/profile-pictures.yaml | 21 --- data/catalog/profile-team.yaml | 33 ----- data/catalog/profiles.yaml | 22 --- data/catalog/query-analyzer.yaml | 39 ------ data/catalog/recommendation-engine-kafka.yaml | 20 --- data/catalog/recommendation-engine.yaml | 39 ------ data/catalog/recommendations.yaml | 21 --- data/catalog/result-cacher-postgres.yaml | 21 --- data/catalog/retail-image-labeler.yaml | 39 ------ data/catalog/returns-processor.yaml | 39 ------ data/catalog/robot-item-sorter.yaml | 39 ------ data/catalog/search-experience.yaml | 29 ---- data/catalog/search.yaml | 20 --- data/catalog/shipping-integrations.yaml | 39 ------ data/catalog/sso-integration.yaml | 39 ------ data/catalog/stock-level-analyzer.yaml | 39 ------ data/catalog/team-a.yaml | 15 -- data/catalog/team-b.yaml | 15 -- data/catalog/team-c.yaml | 15 -- data/catalog/team-d.yaml | 13 -- data/catalog/transaction-store.yaml | 39 ------ data/catalog/transactions-squid-proxy.yaml | 19 --- 
.../user-profile-metadata-service-bucket.yaml | 21 --- .../user-profile-metadata-service.yaml | 41 ------ data/catalog/usps-api-client.yaml | 38 ----- data/catalog/warehousing.yaml | 19 --- data/resource-definitions/api.json | 11 -- data/resource-definitions/component.json | 9 -- data/resource-definitions/k8s-cluster.json | 9 -- data/resource-definitions/kafka-topic.json | 25 ---- data/resource-definitions/squid-proxy.json | 24 ---- data/run-time/archive-entity.yaml | 5 +- .../create-entity-type-empty-schema.json | 6 +- data/run-time/create-entity.yaml | 5 +- data/run-time/custom-data-bulk.json | 4 +- data/run-time/delete-entity.yaml | 5 +- data/run-time/dependencies-bulk.json | 4 +- data/run-time/test-domain-child.yaml | 9 ++ data/run-time/test-domain-parent.yaml | 9 ++ .../test-service-callee.yaml} | 27 +--- data/run-time/test-service-caller.yaml | 7 + data/run-time/test-service-group-1.yaml | 8 ++ data/run-time/test-service-group-2.yaml | 8 ++ data/run-time/test-service-links.yaml | 12 ++ data/run-time/test-service-metadata.yaml | 10 ++ data/run-time/test-service-test-team-1.yaml | 11 ++ data/run-time/test-service-test-team-2.yaml | 11 ++ data/run-time/test-service.yaml | 12 ++ data/run-time/test-team-1.yaml | 12 ++ data/run-time/test-team-2.yaml | 12 ++ data/run-time/test-team-child.yaml | 12 ++ data/run-time/test-team-parent.yaml | 14 ++ data/run-time/unarchive-entity.yaml | 5 +- .../update-entity-type-empty-schema.json | 4 +- data/scorecards/dumb.yaml | 28 ---- data/scorecards/production-readiness.yaml | 28 ---- pyproject.toml | 3 +- tests.orig/custom-events-configure.json | 10 -- tests.orig/custom-events.json | 10 -- .../catalog/cli-test-service-with-groups.yaml | 13 -- tests.orig/data/catalog/cli-test-service.yaml | 71 ---------- tests.orig/data/catalog/test-service.yaml | 83 ----------- tests.orig/data/catalog/test-team-1.yaml | 10 -- tests.orig/data/catalog/test-team-2.yaml | 10 -- .../test_dependencies_dependency_service.yaml | 13 -- 
.../data/json/resource-definitions.json | 13 -- .../data/json/resource_definitions.json | 0 .../data/json/test-resource-definition.json | 1 - .../test-resource-definition.json | 1 - .../data/scorecards/test-scorecard-draft.yaml | 19 --- .../data/scorecards/test-scorecard.yaml | 19 --- tests.orig/dependency-update.json | 7 - tests.orig/test-custom-data-array.json | 16 --- tests.orig/test-custom-data-bulk.json | 24 ---- tests.orig/test-custom-data.json | 5 - .../test-resource-definition-update.json | 5 - tests.orig/test-resource-definition.json | 6 - tests.orig/test_audit_logs.py | 32 ----- tests.orig/test_backup.py | 11 -- .../catalog/test-service-import-2.yaml | 5 - .../catalog/test-service-import.yaml | 5 - .../test_backup_export/json/ip-allowlist.json | 1 - .../test-resource-definition.json | 1 - .../scorecards/test-scorecard.yaml | 19 --- .../test_backup_export/teams/test-team-3.json | 23 ---- tests.orig/test_catalog-invalid-service.yaml | 8 -- tests.orig/test_catalog.py | 107 -------------- tests.orig/test_catalog_create_service.yaml | 5 - tests.orig/test_command_line_opts.py | 33 ----- tests.orig/test_config_file.py | 98 ------------- tests.orig/test_custom_data.py | 18 --- tests.orig/test_custom_events.py | 24 ---- tests.orig/test_dependencies.json | 7 - tests.orig/test_dependencies.py | 23 ---- tests.orig/test_dependencies_bulk.json | 16 --- tests.orig/test_dependencies_update.json | 7 - tests.orig/test_deploys.json | 14 -- tests.orig/test_deploys.py | 32 ----- tests.orig/test_deploys_update.json | 14 -- tests.orig/test_deploys_uuid.json | 14 -- tests.orig/test_discovery_audit.py | 17 --- tests.orig/test_docs.py | 11 -- tests.orig/test_docs.yaml | 42 ------ tests.orig/test_gitops_logs.py | 17 --- tests.orig/test_groups.py | 14 -- tests.orig/test_integrations_aws.py | 55 -------- tests.orig/test_integrations_aws_config.json | 12 -- .../test_integrations_azure_resources.py | 68 --------- tests.orig/test_integrations_coralogix.py | 69 ---------- 
tests.orig/test_integrations_github.py | 77 ----------- .../test_integrations_github_update.json | 4 - tests.orig/test_integrations_gitlab.py | 43 ------ ...test_integrations_gitlab_add_multiple.json | 20 --- .../test_integrations_gitlab_update.json | 8 -- tests.orig/test_integrations_incidentio.py | 69 ---------- tests.orig/test_integrations_launchdarkly.py | 69 ---------- tests.orig/test_integrations_newrelic.py | 69 ---------- tests.orig/test_integrations_pagerduty.py | 26 ---- tests.orig/test_integrations_prometheus.py | 72 ---------- tests.orig/test_integrations_sonarqube.py | 74 ---------- tests.orig/test_invalid-service.yaml | 8 -- tests.orig/test_ip_allowlist.py | 44 ------ tests.orig/test_ip_allowlist_empty.json | 4 - tests.orig/test_ip_allowlist_invalid.json | 8 -- tests.orig/test_packages.py | 35 ----- tests.orig/test_packages_go.sum | 2 - tests.orig/test_packages_java_multiple.json | 10 -- tests.orig/test_packages_java_single.json | 4 - tests.orig/test_packages_node_package.json | 22 --- .../test_packages_node_package_lock.json | 17 --- tests.orig/test_packages_node_yarn.lock | 19 --- tests.orig/test_packages_nuget.csproj | 22 --- .../test_packages_nuget_packages_lock.json | 19 --- tests.orig/test_packages_python_pipfile.lock | 59 -------- .../test_packages_python_requirements.txt | 8 -- tests.orig/test_plugins.json | 20 --- tests.orig/test_plugins.py | 21 --- tests.orig/test_plugins_update.json | 18 --- tests.orig/test_queries.json | 3 - tests.orig/test_queries.py | 67 --------- tests.orig/test_queries.txt | 1 - tests.orig/test_resource_definitions.py | 21 --- tests.orig/test_scorecards.py | 35 ----- tests.orig/test_scorecards.yaml | 21 --- tests.orig/test_scorecards_draft.yaml | 19 --- tests.orig/test_stdin.py | 11 -- tests.orig/test_teams.py | 49 ------- tests.orig/test_teams.yaml | 42 ------ tests.orig/test_teams_update.json | 20 --- tests.orig2/__init__.py | 0 tests.orig2/common.py | 62 --------- tests.orig2/cortex_github.py | 130 
------------------ tests.orig2/feature_flag_check.py | 34 ----- tests.orig2/feature_flag_dump.py | 23 ---- tests.orig2/github_setup.py | 3 - tests.orig2/test_audit_logs.py | 5 - tests.orig2/test_audit_logs_dates.py | 7 - tests.orig2/test_audit_logs_end_date.py | 6 - tests.orig2/test_audit_logs_page.py | 5 - tests.orig2/test_audit_logs_size.py | 5 - tests.orig2/test_audit_logs_start_date.py | 6 - tests.orig2/test_catalog_archive_entity.py | 10 -- tests.orig2/test_catalog_create_entity.py | 9 -- .../test_catalog_create_entity_viewer.py | 11 -- tests.orig2/test_catalog_delete_entity.py | 16 --- .../test_catalog_get_entity_details.py | 5 - ...st_catalog_get_entity_details_hierarchy.py | 6 - tests.orig2/test_catalog_include_links.py | 8 -- tests.orig2/test_catalog_include_metadata.py | 8 -- .../test_catalog_include_nested_fields.py | 8 -- tests.orig2/test_catalog_invalid_page_size.py | 9 -- .../test_catalog_list_by_github_repo.py | 5 - .../test_catalog_list_by_group_multiple.py | 5 - .../test_catalog_list_by_group_single.py | 5 - .../test_catalog_list_by_owners_multiple.py | 5 - .../test_catalog_list_by_owners_single.py | 5 - tests.orig2/test_catalog_list_by_types.py | 5 - .../test_catalog_list_entity_descriptors.py | 6 - ...st_catalog_list_entity_descriptors_page.py | 5 - ...talog_list_entity_descriptors_page_size.py | 5 - ...st_catalog_list_entity_descriptors_yaml.py | 6 - .../test_catalog_list_include_archived.py | 8 -- .../test_catalog_list_include_owners.py | 5 - tests.orig2/test_catalog_list_page.py | 5 - tests.orig2/test_catalog_list_page_size.py | 5 - ...test_catalog_retrieve_entity_descriptor.py | 5 - ...catalog_retrieve_entity_descriptor_yaml.py | 5 - .../test_catalog_retrieve_entity_details.py | 5 - ...etrieve_entity_details_hierarchy_fields.py | 5 - ...atalog_retrieve_entity_scorecard_scores.py | 7 - tests.orig2/test_catalog_unarchive_entity.py | 13 -- tests.orig2/test_config_file.py | 104 -------------- ...st_custom_data_create_or_update_in_bulk.py | 12 -- 
tests.orig2/test_custom_data_delete.py | 12 -- tests.orig2/test_custom_data_list.py | 6 - tests.orig2/test_custom_events_list.py | 15 -- tests.orig2/test_custom_events_uuid.py | 26 ---- tests.orig2/test_docs.py | 18 --- tests.orig2/test_entity_types.py | 26 ---- tests.orig2/test_github.py | 19 --- tests.orig2/test_groups.py | 14 -- tests.orig2/test_ip_allowlist.py | 49 ------- tests.orig2/test_packages.py | 28 ---- tests.orig2/test_packages_java.py | 6 - tests.orig2/test_plugins.py | 23 ---- tests.orig2/test_plugins_invalid.py | 18 --- tests.orig2/test_scorecards.py | 30 ---- tests.orig2/test_scorecards_drafts.py | 11 -- tests.orig2/test_teams.py | 5 - {tests.orig => tests}/test-groups.json | 0 tests/test_catalog_create_entity.py | 7 +- tests/test_catalog_get_entity_details.py | 6 +- ...st_catalog_get_entity_details_hierarchy.py | 9 +- tests/test_catalog_include_links.py | 3 +- tests/test_catalog_include_metadata.py | 4 +- tests/test_catalog_include_nested_fields.py | 9 +- tests/test_catalog_list_by_group_multiple.py | 5 +- tests/test_catalog_list_by_group_single.py | 4 +- tests/test_catalog_list_by_owners_multiple.py | 7 +- tests/test_catalog_list_by_owners_single.py | 5 +- tests/test_catalog_list_by_types.py | 6 +- tests/test_catalog_list_entity_descriptors.py | 9 +- ...st_catalog_list_entity_descriptors_page.py | 8 +- ...talog_list_entity_descriptors_page_size.py | 4 +- ...st_catalog_list_entity_descriptors_yaml.py | 7 +- tests/test_catalog_list_include_archived.py | 11 +- ...test_catalog_retrieve_entity_descriptor.py | 7 +- ...catalog_retrieve_entity_descriptor_yaml.py | 6 +- tests/test_catalog_retrieve_entity_details.py | 6 +- ...etrieve_entity_details_hierarchy_fields.py | 6 +- tests/{conftest.py => test_conftest.py} | 0 ...st_custom_data_create_or_update_in_bulk.py | 4 +- tests/test_custom_data_delete.py | 10 +- tests/test_custom_data_list.py | 4 +- tests/test_custom_events_list.py | 12 +- tests/test_custom_events_uuid.py | 16 ++- 
tests/test_custom_metrics.py | 13 +- tests/test_dependencies.py | 17 ++- tests/test_deploys.py | 27 ++-- tests/test_entity_types.py | 24 ++-- tests/test_groups_input_file.py | 16 +++ tests/test_integrations_aws.py | 4 +- tests/test_integrations_azure_devops.py | 4 +- tests/test_integrations_azure_resources.py | 4 +- tests/test_integrations_circleci.py | 4 +- tests/test_integrations_coralogix.py | 4 +- tests/test_integrations_datadog.py | 4 +- tests/test_integrations_github.py | 4 +- tests/test_integrations_gitlab.py | 4 +- tests/test_integrations_incidentio.py | 4 +- tests/test_integrations_launchdarkly.py | 4 +- tests/test_integrations_newrelic.py | 4 +- tests/test_integrations_pagerduty.py | 4 +- tests/test_integrations_prometheus.py | 4 +- tests/test_integrations_sonarqube.py | 4 +- 362 files changed, 646 insertions(+), 6198 deletions(-) create mode 100644 cortexapps_cli/.cortex.py.swp create mode 100644 cortexapps_cli/commands/initiatives.py delete mode 100644 data/catalog/ach-payments-nacha.yaml delete mode 100644 data/catalog/admin-customer-support.yaml delete mode 100644 data/catalog/alerting-stock-service.yaml delete mode 100644 data/catalog/api-australia.yaml delete mode 100644 data/catalog/api-back-profiles.yaml delete mode 100644 data/catalog/api-bi-systems.yaml delete mode 100644 data/catalog/api-business-analytics.yaml delete mode 100644 data/catalog/api-business-development.yaml delete mode 100644 data/catalog/api-business-innovation.yaml delete mode 100644 data/catalog/api-business-model.yaml delete mode 100644 data/catalog/api-business-operations.yaml delete mode 100644 data/catalog/api-business-plan.yaml delete mode 100644 data/catalog/api-business-process.yaml delete mode 100644 data/catalog/api-business-systems.yaml delete mode 100644 data/catalog/api-business-technology.yaml delete mode 100644 data/catalog/api-business-transformation.yaml delete mode 100644 data/catalog/api-caribbean.yaml delete mode 100644 data/catalog/api-central-america.yaml 
delete mode 100644 data/catalog/api-circle-cicd.yaml delete mode 100644 data/catalog/api-code-search.yaml delete mode 100644 data/catalog/api-cold-storage.yaml delete mode 100644 data/catalog/api-credit-card.yaml delete mode 100644 data/catalog/api-datacenter-monitors.yaml delete mode 100644 data/catalog/api-design-file-updates.yaml delete mode 100644 data/catalog/api-dev-pros.yaml delete mode 100644 data/catalog/api-europe.yaml delete mode 100644 data/catalog/api-feature-services.yaml delete mode 100644 data/catalog/api-infrastructure-management.yaml delete mode 100644 data/catalog/api-infrastructure-monitoring.yaml delete mode 100644 data/catalog/api-infrastructure-planning.yaml delete mode 100644 data/catalog/api-infrastructure-security.yaml delete mode 100644 data/catalog/api-infrastructure-services.yaml delete mode 100644 data/catalog/api-infrastructure-strategy.yaml delete mode 100644 data/catalog/api-na-west.yaml delete mode 100644 data/catalog/api-network-telemetry.yaml delete mode 100644 data/catalog/api-networking-hardware.yaml delete mode 100644 data/catalog/api-networking-infrastructure.yaml delete mode 100644 data/catalog/api-networking-protocol.yaml delete mode 100644 data/catalog/api-networking-services.yaml delete mode 100644 data/catalog/api-networking-software.yaml delete mode 100644 data/catalog/api-networking-solutions.yaml delete mode 100644 data/catalog/api-oceania.yaml delete mode 100644 data/catalog/api-polar-regions.yaml delete mode 100644 data/catalog/api-premium-logins.yaml delete mode 100644 data/catalog/api-profile-integrations.yaml delete mode 100644 data/catalog/api-quarterly-reports.yaml delete mode 100644 data/catalog/api-security-health-status.yaml delete mode 100644 data/catalog/api-south-america-region.yaml delete mode 100644 data/catalog/api-standards-and-compliance.yaml delete mode 100644 data/catalog/api-transactions-api.yaml delete mode 100644 data/catalog/api-user-services.yaml delete mode 100644 data/catalog/auth-team.yaml 
delete mode 100644 data/catalog/authentication.yaml delete mode 100644 data/catalog/autocomplete-parser.yaml delete mode 100644 data/catalog/autocomplete.yaml delete mode 100644 data/catalog/checkout.yaml delete mode 100644 data/catalog/credit-card-transaction-processor.yaml delete mode 100644 data/catalog/data-calculation-batch-job.yaml delete mode 100644 data/catalog/data-ingestion.yaml delete mode 100644 data/catalog/data.yaml delete mode 100644 data/catalog/engineering-dev-cluster.yaml delete mode 100644 data/catalog/experience.yaml delete mode 100644 data/catalog/feed-calculator-job.yaml delete mode 100644 data/catalog/fraud-analyzer.yaml delete mode 100644 data/catalog/identity.yaml delete mode 100644 data/catalog/image-recognition-pipeline.yaml delete mode 100644 data/catalog/image-store-bucket.yaml delete mode 100644 data/catalog/import-engine.yaml delete mode 100644 data/catalog/inventory-scraper.yaml delete mode 100644 data/catalog/inventory-team.yaml delete mode 100644 data/catalog/inventory.yaml delete mode 100644 data/catalog/logistics-team.yaml delete mode 100644 data/catalog/model-innovation-team.yaml delete mode 100644 data/catalog/new-item-fanout-service.yaml delete mode 100644 data/catalog/oauth2-identity-service.yaml delete mode 100644 data/catalog/order-management.yaml delete mode 100644 data/catalog/orders-events.yaml delete mode 100644 data/catalog/packaging-recommender.yaml delete mode 100644 data/catalog/payments-team.yaml delete mode 100644 data/catalog/payments.yaml delete mode 100644 data/catalog/profile-management.yaml delete mode 100644 data/catalog/profile-pictures.yaml delete mode 100644 data/catalog/profile-team.yaml delete mode 100644 data/catalog/profiles.yaml delete mode 100644 data/catalog/query-analyzer.yaml delete mode 100644 data/catalog/recommendation-engine-kafka.yaml delete mode 100644 data/catalog/recommendation-engine.yaml delete mode 100644 data/catalog/recommendations.yaml delete mode 100644 
data/catalog/result-cacher-postgres.yaml delete mode 100644 data/catalog/retail-image-labeler.yaml delete mode 100644 data/catalog/returns-processor.yaml delete mode 100644 data/catalog/robot-item-sorter.yaml delete mode 100644 data/catalog/search-experience.yaml delete mode 100644 data/catalog/search.yaml delete mode 100644 data/catalog/shipping-integrations.yaml delete mode 100644 data/catalog/sso-integration.yaml delete mode 100644 data/catalog/stock-level-analyzer.yaml delete mode 100644 data/catalog/team-a.yaml delete mode 100644 data/catalog/team-b.yaml delete mode 100644 data/catalog/team-c.yaml delete mode 100644 data/catalog/team-d.yaml delete mode 100644 data/catalog/transaction-store.yaml delete mode 100644 data/catalog/transactions-squid-proxy.yaml delete mode 100644 data/catalog/user-profile-metadata-service-bucket.yaml delete mode 100644 data/catalog/user-profile-metadata-service.yaml delete mode 100644 data/catalog/usps-api-client.yaml delete mode 100644 data/catalog/warehousing.yaml delete mode 100644 data/resource-definitions/api.json delete mode 100644 data/resource-definitions/component.json delete mode 100644 data/resource-definitions/k8s-cluster.json delete mode 100644 data/resource-definitions/kafka-topic.json delete mode 100644 data/resource-definitions/squid-proxy.json create mode 100644 data/run-time/test-domain-child.yaml create mode 100644 data/run-time/test-domain-parent.yaml rename data/{catalog/backend-worker.yaml => run-time/test-service-callee.yaml} (99%) create mode 100644 data/run-time/test-service-caller.yaml create mode 100644 data/run-time/test-service-group-1.yaml create mode 100644 data/run-time/test-service-group-2.yaml create mode 100644 data/run-time/test-service-links.yaml create mode 100644 data/run-time/test-service-metadata.yaml create mode 100644 data/run-time/test-service-test-team-1.yaml create mode 100644 data/run-time/test-service-test-team-2.yaml create mode 100644 data/run-time/test-service.yaml create mode 
100644 data/run-time/test-team-1.yaml create mode 100644 data/run-time/test-team-2.yaml create mode 100644 data/run-time/test-team-child.yaml create mode 100644 data/run-time/test-team-parent.yaml delete mode 100644 data/scorecards/dumb.yaml delete mode 100644 data/scorecards/production-readiness.yaml delete mode 100644 tests.orig/custom-events-configure.json delete mode 100644 tests.orig/custom-events.json delete mode 100644 tests.orig/data/catalog/cli-test-service-with-groups.yaml delete mode 100644 tests.orig/data/catalog/cli-test-service.yaml delete mode 100644 tests.orig/data/catalog/test-service.yaml delete mode 100644 tests.orig/data/catalog/test-team-1.yaml delete mode 100644 tests.orig/data/catalog/test-team-2.yaml delete mode 100644 tests.orig/data/catalog/test_dependencies_dependency_service.yaml delete mode 100644 tests.orig/data/json/resource-definitions.json delete mode 100644 tests.orig/data/json/resource_definitions.json delete mode 100644 tests.orig/data/json/test-resource-definition.json delete mode 100644 tests.orig/data/resource-definitions/test-resource-definition.json delete mode 100644 tests.orig/data/scorecards/test-scorecard-draft.yaml delete mode 100644 tests.orig/data/scorecards/test-scorecard.yaml delete mode 100644 tests.orig/dependency-update.json delete mode 100644 tests.orig/test-custom-data-array.json delete mode 100644 tests.orig/test-custom-data-bulk.json delete mode 100644 tests.orig/test-custom-data.json delete mode 100644 tests.orig/test-resource-definition-update.json delete mode 100644 tests.orig/test-resource-definition.json delete mode 100644 tests.orig/test_audit_logs.py delete mode 100644 tests.orig/test_backup.py delete mode 100644 tests.orig/test_backup_export/catalog/test-service-import-2.yaml delete mode 100644 tests.orig/test_backup_export/catalog/test-service-import.yaml delete mode 100644 tests.orig/test_backup_export/json/ip-allowlist.json delete mode 100644 
tests.orig/test_backup_export/resource-definitions/test-resource-definition.json delete mode 100644 tests.orig/test_backup_export/scorecards/test-scorecard.yaml delete mode 100644 tests.orig/test_backup_export/teams/test-team-3.json delete mode 100644 tests.orig/test_catalog-invalid-service.yaml delete mode 100644 tests.orig/test_catalog.py delete mode 100644 tests.orig/test_catalog_create_service.yaml delete mode 100644 tests.orig/test_command_line_opts.py delete mode 100644 tests.orig/test_config_file.py delete mode 100644 tests.orig/test_custom_data.py delete mode 100644 tests.orig/test_custom_events.py delete mode 100644 tests.orig/test_dependencies.json delete mode 100644 tests.orig/test_dependencies.py delete mode 100644 tests.orig/test_dependencies_bulk.json delete mode 100644 tests.orig/test_dependencies_update.json delete mode 100644 tests.orig/test_deploys.json delete mode 100644 tests.orig/test_deploys.py delete mode 100644 tests.orig/test_deploys_update.json delete mode 100644 tests.orig/test_deploys_uuid.json delete mode 100644 tests.orig/test_discovery_audit.py delete mode 100644 tests.orig/test_docs.py delete mode 100644 tests.orig/test_docs.yaml delete mode 100644 tests.orig/test_gitops_logs.py delete mode 100644 tests.orig/test_groups.py delete mode 100644 tests.orig/test_integrations_aws.py delete mode 100644 tests.orig/test_integrations_aws_config.json delete mode 100644 tests.orig/test_integrations_azure_resources.py delete mode 100644 tests.orig/test_integrations_coralogix.py delete mode 100644 tests.orig/test_integrations_github.py delete mode 100644 tests.orig/test_integrations_github_update.json delete mode 100644 tests.orig/test_integrations_gitlab.py delete mode 100644 tests.orig/test_integrations_gitlab_add_multiple.json delete mode 100644 tests.orig/test_integrations_gitlab_update.json delete mode 100644 tests.orig/test_integrations_incidentio.py delete mode 100644 tests.orig/test_integrations_launchdarkly.py delete mode 100644 
tests.orig/test_integrations_newrelic.py delete mode 100644 tests.orig/test_integrations_pagerduty.py delete mode 100644 tests.orig/test_integrations_prometheus.py delete mode 100644 tests.orig/test_integrations_sonarqube.py delete mode 100644 tests.orig/test_invalid-service.yaml delete mode 100644 tests.orig/test_ip_allowlist.py delete mode 100644 tests.orig/test_ip_allowlist_empty.json delete mode 100644 tests.orig/test_ip_allowlist_invalid.json delete mode 100644 tests.orig/test_packages.py delete mode 100644 tests.orig/test_packages_go.sum delete mode 100644 tests.orig/test_packages_java_multiple.json delete mode 100644 tests.orig/test_packages_java_single.json delete mode 100644 tests.orig/test_packages_node_package.json delete mode 100644 tests.orig/test_packages_node_package_lock.json delete mode 100644 tests.orig/test_packages_node_yarn.lock delete mode 100644 tests.orig/test_packages_nuget.csproj delete mode 100644 tests.orig/test_packages_nuget_packages_lock.json delete mode 100644 tests.orig/test_packages_python_pipfile.lock delete mode 100644 tests.orig/test_packages_python_requirements.txt delete mode 100644 tests.orig/test_plugins.json delete mode 100644 tests.orig/test_plugins.py delete mode 100644 tests.orig/test_plugins_update.json delete mode 100644 tests.orig/test_queries.json delete mode 100644 tests.orig/test_queries.py delete mode 100644 tests.orig/test_queries.txt delete mode 100644 tests.orig/test_resource_definitions.py delete mode 100644 tests.orig/test_scorecards.py delete mode 100644 tests.orig/test_scorecards.yaml delete mode 100644 tests.orig/test_scorecards_draft.yaml delete mode 100644 tests.orig/test_stdin.py delete mode 100644 tests.orig/test_teams.py delete mode 100644 tests.orig/test_teams.yaml delete mode 100644 tests.orig/test_teams_update.json delete mode 100644 tests.orig2/__init__.py delete mode 100644 tests.orig2/common.py delete mode 100644 tests.orig2/cortex_github.py delete mode 100644 tests.orig2/feature_flag_check.py 
delete mode 100644 tests.orig2/feature_flag_dump.py delete mode 100644 tests.orig2/github_setup.py delete mode 100644 tests.orig2/test_audit_logs.py delete mode 100644 tests.orig2/test_audit_logs_dates.py delete mode 100644 tests.orig2/test_audit_logs_end_date.py delete mode 100644 tests.orig2/test_audit_logs_page.py delete mode 100644 tests.orig2/test_audit_logs_size.py delete mode 100644 tests.orig2/test_audit_logs_start_date.py delete mode 100644 tests.orig2/test_catalog_archive_entity.py delete mode 100644 tests.orig2/test_catalog_create_entity.py delete mode 100644 tests.orig2/test_catalog_create_entity_viewer.py delete mode 100644 tests.orig2/test_catalog_delete_entity.py delete mode 100644 tests.orig2/test_catalog_get_entity_details.py delete mode 100644 tests.orig2/test_catalog_get_entity_details_hierarchy.py delete mode 100644 tests.orig2/test_catalog_include_links.py delete mode 100644 tests.orig2/test_catalog_include_metadata.py delete mode 100644 tests.orig2/test_catalog_include_nested_fields.py delete mode 100644 tests.orig2/test_catalog_invalid_page_size.py delete mode 100644 tests.orig2/test_catalog_list_by_github_repo.py delete mode 100644 tests.orig2/test_catalog_list_by_group_multiple.py delete mode 100644 tests.orig2/test_catalog_list_by_group_single.py delete mode 100644 tests.orig2/test_catalog_list_by_owners_multiple.py delete mode 100644 tests.orig2/test_catalog_list_by_owners_single.py delete mode 100644 tests.orig2/test_catalog_list_by_types.py delete mode 100644 tests.orig2/test_catalog_list_entity_descriptors.py delete mode 100644 tests.orig2/test_catalog_list_entity_descriptors_page.py delete mode 100644 tests.orig2/test_catalog_list_entity_descriptors_page_size.py delete mode 100644 tests.orig2/test_catalog_list_entity_descriptors_yaml.py delete mode 100644 tests.orig2/test_catalog_list_include_archived.py delete mode 100644 tests.orig2/test_catalog_list_include_owners.py delete mode 100644 tests.orig2/test_catalog_list_page.py delete 
mode 100644 tests.orig2/test_catalog_list_page_size.py delete mode 100644 tests.orig2/test_catalog_retrieve_entity_descriptor.py delete mode 100644 tests.orig2/test_catalog_retrieve_entity_descriptor_yaml.py delete mode 100644 tests.orig2/test_catalog_retrieve_entity_details.py delete mode 100644 tests.orig2/test_catalog_retrieve_entity_details_hierarchy_fields.py delete mode 100644 tests.orig2/test_catalog_retrieve_entity_scorecard_scores.py delete mode 100644 tests.orig2/test_catalog_unarchive_entity.py delete mode 100644 tests.orig2/test_config_file.py delete mode 100644 tests.orig2/test_custom_data_create_or_update_in_bulk.py delete mode 100644 tests.orig2/test_custom_data_delete.py delete mode 100644 tests.orig2/test_custom_data_list.py delete mode 100644 tests.orig2/test_custom_events_list.py delete mode 100644 tests.orig2/test_custom_events_uuid.py delete mode 100644 tests.orig2/test_docs.py delete mode 100644 tests.orig2/test_entity_types.py delete mode 100644 tests.orig2/test_github.py delete mode 100644 tests.orig2/test_groups.py delete mode 100644 tests.orig2/test_ip_allowlist.py delete mode 100644 tests.orig2/test_packages.py delete mode 100644 tests.orig2/test_packages_java.py delete mode 100644 tests.orig2/test_plugins.py delete mode 100644 tests.orig2/test_plugins_invalid.py delete mode 100644 tests.orig2/test_scorecards.py delete mode 100644 tests.orig2/test_scorecards_drafts.py delete mode 100644 tests.orig2/test_teams.py rename {tests.orig => tests}/test-groups.json (100%) rename tests/{conftest.py => test_conftest.py} (100%) create mode 100644 tests/test_groups_input_file.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f503b36..865712d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,4 +5,4 @@ # the repo. Unless a later match takes precedence, # @global-owner1 and @global-owner2 will be requested for # review when someone opens a pull request. 
-* @jeff-schnitter @rich-jay +* @jeff-schnitter diff --git a/HISTORY.md b/HISTORY.md index f915dc3..45b9153 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -9,16 +9,24 @@ Release History **Breaking Changes** - custom-events -i changed to -ts -- groups -> no support for adding groups with JSON file, add groups via comma-separated list on command line -- plugins get changed to-> list, +- plugins get changed to -> list, +- plugins get-by-tag changed to -> get +- resource-definitions -> entity-types + delete -ty -> delete -t **TODO** - Do a full reconciliation of all flags - DONE: Add -debug flag -- Test input files, env vars -- Add support for adding groups via JSON file? -- Check all get/list sub-commands -- sort by date: TypeError: '<' not supported between instances of 'NoneType' and 'NoneType' +- DONE: Test input files, env vars +- DONE: Add support for adding groups via JSON file? +- DONE: Check all get/list sub-commands + could change get-all to list, but prefer to make no change +- backup export -> don't include cloud entities +- backup import -> need to complete +- deleting existing entity types -> would be good to loop over entity types with a certain filter + base initially on name prefix +- warning about using env vars + diff --git a/Justfile b/Justfile index 3a50f16..9269661 100644 --- a/Justfile +++ b/Justfile @@ -1,49 +1,33 @@ -cortex_cli := 'poetry run cortex2' -cortex_cli_orig := 'poetry run cortex -q' +cortex_cli := 'poetry run cortex' + +export CORTEX_API_KEY := env('CORTEX_API_KEY') +export CORTEX_BASE_URL := env('CORTEX_BASE_URL', "https://api.getcortexapp.com") +export CORTEX_API_KEY_VIEWER := env('CORTEX_API_KEY_VIEWER') help: @just -l -_check-vars: - #!/bin/bash - if [ -z ${CORTEX_API_KEY+x} ] - then - echo "CORTEX_API_KEY environment variable is not set." - exit 1 - fi - - if [ -z ${CORTEX_BASE_URL+x} ] - then - echo "CORTEX_BASE_URL environment variable is not set." 
- exit - fi - # Run all tests -test-all: _check-vars load-data test-parallel test-serial +test-all: test-parallel test-serial # Run tests that can run in parallel -test-parallel: _check-vars load-data +test-parallel: PYTHONPATH=. poetry run pytest -rA -n auto -m "not serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests +# Run all tests serially - helpful to see if any tests seem to be hanging +_test-all-serial: + PYTHONPATH=. poetry run pytest -rA -m "not serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests + # Run tests that have to run sequentially -test-serial: _check-vars load-data - #@if [ -f .coverage ]; then rm .coverage; fi +test-serial: + @if [ -f .coverage ]; then rm .coverage; fi PYTHONPATH=. poetry run pytest -rA -n auto -m "serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests # Run a single test, ie: just test tests/test_catalog.py -test testname: _check-vars +test testname: poetry run pytest {{testname}} -# Run all tests for an API function, assumes all tests named test_* -test-suite command: _check-vars - poetry run pytest -k test_{{command}} - -# Run all tests in a directory -test-dir dir: _check-vars - poetry run pytest {{dir}} - -# Load data from 'data' directory into Cortex -load-data: +_load-data: #!/bin/bash if [[ -f .load-data-done ]] then @@ -53,17 +37,19 @@ load-data: # Delete existing entity definitions and any entities to prevent getting a conflict error. 
# TODO: modify cli import to add a flag to manage this - for resource_file in `ls data/resource-definitions`; do - resource=$(basename ${resource_file} .json) - {{cortex_cli_orig}} catalog delete-by-type -t ${resource} - ({{cortex_cli_orig}} resource-definitions get -t ${resource} && {{cortex_cli_orig}} resource-definitions delete -t ${resource}) || : - {{cortex_cli_orig}} resource-definitions create -f data/resource-definitions/${resource_file} + for entity_type_file in `ls -1 data/entity-types/*`; do + entity_type=$(basename ${entity_type_file} .json) + echo "Deleting entity type: ${entity_type}" + # Delete all instances of this type + {{cortex_cli}} catalog delete-by-type -t ${entity_type} + # Now delete the type if it exists + ({{cortex_cli}} entity-types get -t ${entity_type} && {{cortex_cli}} entity-types delete -t ${entity_type}) || : done - {{cortex_cli_orig}} backup import -d data + {{cortex_cli}} backup import -d data # Archive a couple of entities in order to test commands that include or exclude archived entities - {{cortex_cli_orig}} catalog archive -t robot-item-sorter - {{cortex_cli_orig}} catalog archive -t inventory-scraper + {{cortex_cli}} catalog archive -t robot-item-sorter + {{cortex_cli}} catalog archive -t inventory-scraper touch .load-data-done diff --git a/cortexapps_cli/.cortex.py.swp b/cortexapps_cli/.cortex.py.swp new file mode 100644 index 0000000000000000000000000000000000000000..55b56103e4e5d535ffef1fe502d5447437ba2745 GIT binary patch literal 16384 zcmeHOYm6jS6>c8B5L7TxfB3lc5W6QkT|K)CY}nAsIt;R~?5wi`%diVYb=B?e+NrLp zt;fu?D@(v93sK@D{y_){CjKGDAH<+B{82-qgv9*;EFdo*ynP@(5YX@3I#u0OOpjst zVMwZzZ>GAwd(S!d+;eZ;b84nNecAR)#5vl=3O*mGRPOrrueyhCdv*Ht^D347veOC8 zo@+;u42U1|xx_F(<9SPsU4f@Lo@qE^+8xRXY^h@7$c>; zU3#GOz_EGYl*;B!8>gx7MD=v>!P721HZf(d(gURjN)MDCC_PYmp!7iLfzkt|2i}$* zi2MzeYhn5EZ2c1dKCbZlKm6(P{WQl<+r7?w#Qc#$|JRtmk?((&JEr|#W&S$me^cQ9 z&3w%KZZXn}v4 z`OBC;^F4X_e`J0S^M?!k9}4=PDDb~$ei!#Y=cK&+-!Z?N`5OxSQw9Bx6!<5ZPwn6Q 
z-n{%Lm`~^bh64Y1LI0x#{xRmSX8Wd3&ddKT^J)CvT;LyNKDF<;0{;l}SF!wUr{v{7 z%zSG9Z3X@)^BwO0Qh|Snd5ihQ_vPh3$b4%5&kFnl1^FL&f4)C&2NjLwA1d&CyZlvp zp!7iLfzkt|2TBi=9wzdG)Blv&|Jb--UF5p(+dcXzxz^TCTz;VE1$ccUqTnFp`&IFDFUPLZ-1h^Tv7}yM) z0ZafV1NR{pyao6g@KqoHE&@&lo<|<~W8hBUOF$3k0v7@&0Y{MsKLp$l+zxyJI0#$- z%mSwZ&mb57IuHWq11AE%ME-vh5CA)Y4*^diiTEk-4PXFJ4dMdeanv%t3tS5v02YDE zfmbkRG;hzQMU3?$X+oHu+p)W%V>?o-Oo;6rF*7&6FnhVaZP#}F;@L$Z-DNxQ+`e=p zv1|mk(RQS;Lt)8|?MiD(tk{ktTrU!4Pnt`j;{`&l+F>MJQwpym4CtW0=BQM}$R`|x z8k$A*>B;QAbV9i``$mn|YDM?`z;>g0719S@9EcgEK)__NBnQH9EfMvkD*_n>gQSV+ z_4~N5RfpeDttsmFl>Exjo6!>!Gx;oDkQM%f?l zXmEOk+KztENT*3^iG!@-n#w3o*Lx9MM(jja=9sky4M7O5XeBUw&5{RJV-Cm?V3v*A$ijF__(f(RZ}a=t$l146EK5hN4<_D#VST8){^gMpHT^YC*d; zDGV&<#J9=LqHH3&)|NaJf^}-uClf_*5SbYbd|A(0OirrVJlAxLFcdZ2_Ot~A;r6*b z2t!{6wNGJE$!#P_r}68Z!0YQY79}P15LrVX_EbkpR9DnZ{8(d%<3>kaCWPJhu^A@X z+J?Cl`@)Bfhy>cm$VKE9gSWH_b)paJ&jvVZ$FX~WBu|JL^<01yrF{kJl|>dA5=NHo z4n3s;i;NicPxcVlG@N=~4GY-bDUi@*dGGXhnn_{9Nj~|Rp>JPtYt27>zJGkVdHv3 z7!h(T!=Ub%gxpXz4YwYmlPIXrC4JCZITlvY-qq--%`11Sq1>F8atsbwd(Oy)w=i!! 
zgxr8JcYHey85$W56S5l^k&S&C3s*IvW+VPceSsh=`XarR_=5KLQQ3{*Pq@FxG$cib7Xvn~Nnh?7@I-J;s;|!+g zR3uEp6&`E}um;rrKsrRSsYRYJ<1q62_UDPS!>Q~!-ZyJ0EYZzy5K6A^)jqNSdvN8d~E`ZNmZ)Hm9p~K~VXvQYh z4&n%#mKWe0wqgtfY{DVV=;8nx(P?!jPMu*K;A@A>rwiKKOh+274AL%{F~il3ldN(T z=0L2_Ns+wKGnR4Gi`#4^B;g#1?~RdrTJFKd)0o8zf5$^B*@hjvtzkDY=y{_tS+8a1 zBlXA4wPaJ1tETiLF{^%&S=by$4#Pvq{s5cSerZN~tC8n9)hqW_eUHw$bj8i8`GZr| zh0aP|-yyTcKTE4wbB!8Tab^tK)zPOZlySf%VO&?y)%JqGTcP7Eo^)BP;4iv+aa+ZT zKZtss+eoZ!@-I6KV;MF#ZP{F@BquQ#dGMM_CGnto`lr`yd-m1t%j}wufa!Ovri!uy za|eH!5|3$?7{qScDv$@DJPOnAUUv1y`Agf-!#usVz3j@Rb4`@!q%|19IKdjz?+L3; z>#GqaA>{+CrvSZ?Q&?Ucr7{zDBNsde+f>?jF0c^y*7feL!!1DhDLqhnp!7iLfzktSYY$Lb zlBEjx-#98$;Pk8ORQS+!N=|CJ-ZyMl*K3^0;E9jwNbH(P=|LS=QK#c-4i|f-C~LtX zA3w9K;SF^i)pbI(#4+tH$RJ&ikR~{kqv&)m$+DH2^|Be&AYY+`wCeoqt{vNEX7xSW z7cSHn=PsSs^Tq6Ha(bsTp~{N%NE-c6_xvcVhkkMpFOof521a1w*Vu+3PV~|=0&8pw z)b&uOHl?nr=C(S)jZ=#wX|Rqm--)}nJ601;dP_)<#`35{;lDARamsC92kWgkDBzE6 zGm~cWW4VUPVxz$+B)%9b(`eO(clfHszta-w+VY}A9!d^|OV?b} zZ`Jq$w-PIp7J@b~YD909Q2iE%n#daI#S#7+m@4#{1e7v8u`d;~uLePsWaH&j3-2+M zXV$jp09s;%2Vz^2L5cR=vF~GfhNu<V7XTYXtq?>!31Z|!FTaL`<&;=J zy%NRiJc#l#{;eHV`4*O+3RP|1 zSfLjP7h;j37>4(7Xfu_Gwq$4qwoe{u)%Xn%SE&T5ZYVThOAb0n@@vCRD0_5UTx=3} WU&K_hG~mxn_cq=GcByA#z4Bj8F&L%* literal 0 HcmV?d00001 diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index 9845a44..d5ed821 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -24,6 +24,7 @@ import cortexapps_cli.commands.entity_types as entity_types import cortexapps_cli.commands.gitops_logs as gitops_logs import cortexapps_cli.commands.groups as groups +import cortexapps_cli.commands.initiatives as initiatives import cortexapps_cli.commands.integrations as integrations import cortexapps_cli.commands.ip_allowlist as ip_allowlist import cortexapps_cli.commands.on_call as on_call @@ -57,6 +58,7 @@ app.add_typer(entity_types.app, name="entity-types") app.add_typer(gitops_logs.app, name="gitops-logs") app.add_typer(groups.app, name="groups") +app.add_typer(initiatives.app, 
name="initiatives") app.add_typer(integrations.app, name="integrations") app.add_typer(ip_allowlist.app, name="ip-allowlist") app.add_typer(on_call.app, name="on-call") diff --git a/cortexapps_cli/commands/backup.py b/cortexapps_cli/commands/backup.py index dcc5df3..2e8f6ff 100644 --- a/cortexapps_cli/commands/backup.py +++ b/cortexapps_cli/commands/backup.py @@ -199,9 +199,8 @@ def export( ): """ Export tenant - - """ + export_types = sorted(list(set(export_types))) client = ctx.obj["client"] @@ -233,42 +232,66 @@ def _import_ip_allowlist(directory): print(" Importing: " + filename) ip_allowlist.get(ctx, file=file_path, force=False, _print=False) -def _import_entity_types(directory): +def _import_entity_types(ctx, force, directory): if os.path.isdir(directory): - print("FOUND: " + directory) + print("Processing: " + directory) + for filename in sorted(os.listdir(directory)): + file_path = os.path.join(directory, filename) + if os.path.isfile(file_path): + print(" Importing: " + filename) + entity_types.create(ctx, file_input=open(file_path), force=force) -def _import_catalog(directory): +def _import_catalog(ctx, directory): if os.path.isdir(directory): - print("FOUND: " + directory) + print("Processing: " + directory) + for filename in sorted(os.listdir(directory)): + file_path = os.path.join(directory, filename) + if os.path.isfile(file_path): + print(" Importing: " + filename) + catalog.create(ctx, file_input=open(file_path)) -def _import_plugins(directory): +def _import_plugins(ctx, directory): if os.path.isdir(directory): - print("FOUND: " + directory) + print("Processing: " + directory) + for filename in sorted(os.listdir(directory)): + file_path = os.path.join(directory, filename) + if os.path.isfile(file_path): + print(" Importing: " + filename) + plugins.create(ctx, file_input=open(file_path)) -def _import_scorecards(directory): +def _import_scorecards(ctx, directory): if os.path.isdir(directory): - print("FOUND: " + directory) + print("Processing: " + 
directory) + for filename in sorted(os.listdir(directory)): + file_path = os.path.join(directory, filename) + if os.path.isfile(file_path): + print(" Importing: " + filename) + scorecards.create(ctx, file_input=open(file_path), dry_run=False) -def _import_workflows(directory): +def _import_workflows(ctx, directory): if os.path.isdir(directory): - print("FOUND: " + directory) + print("Processing: " + directory) + for filename in sorted(os.listdir(directory)): + file_path = os.path.join(directory, filename) + if os.path.isfile(file_path): + print(" Importing: " + filename) + workflows.create(ctx, file_input=open(file_path)) @app.command("import") def import_tenant( ctx: typer.Context, directory: str = typer.Option(..., "--directory", "-d", help="Location of import directory."), + force: bool = typer.Option(False, "--force", help="Recreate entities if they already exist."), ): """ Import data into tenant - """ + client = ctx.obj["client"] - print("import directory = " + directory) _import_ip_allowlist(directory + "/ip-allowlist") - _import_entity_types(directory + "/entity-types") - _import_catalog(directory + "/catalog") - _import_plugins(directory + "/plugins") - _import_scorecards(directory + "/scorecards") - _import_workflows(directory + "/workflows") - + _import_entity_types(ctx, force, directory + "/entity-types") + _import_catalog(ctx, directory + "/catalog") + _import_plugins(ctx, directory + "/plugins") + _import_scorecards(ctx, directory + "/scorecards") + _import_workflows(ctx, directory + "/workflows") diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index cf15272..cb8d9a7 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -189,7 +189,6 @@ def archive( client = ctx.obj["client"] r = client.put("api/v1/catalog/" + tag + "/archive") - print_output_with_context(ctx, r) @app.command() def unarchive( diff --git a/cortexapps_cli/commands/entity_types.py 
b/cortexapps_cli/commands/entity_types.py index 7aec9bd..51830a5 100644 --- a/cortexapps_cli/commands/entity_types.py +++ b/cortexapps_cli/commands/entity_types.py @@ -77,6 +77,7 @@ def delete( def create( ctx: typer.Context, file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom entity definition; can be passed as stdin with -, example: -f-")] = None, + force: bool = typer.Option(False, "--force", help="Recreate entity if it already exists."), ): """ Create entity type @@ -85,13 +86,34 @@ def create( client = ctx.obj["client"] data = json.loads("".join([line for line in file_input])) - r = client.post("api/v1/catalog/definitions/" + entity_type) - print_json(data=r) + entity_type = data['type'] + entities = list(ctx=ctx, _print=False, include_built_in=False) + + # Check if any definition has type == 'tool-test' + exists = any(entity.get('type') == entity_type for entity in entities.get('definitions', [])) + if entities is None or not exists: + client.post("api/v1/catalog/definitions", data=data) + +@app.command() +def update( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom entity definition; can be passed as stdin with -, example: -f-")] = None, + entity_type: str = typer.Option(..., "--type", "-t", help="The entity type"), +): + """ + Update entity type + """ + + client = ctx.obj["client"] + data = json.loads("".join([line for line in file_input])) + + r = client.update("api/v1/catalog/definitions/" + entity_type, data=data) @app.command() def get( ctx: typer.Context, entity_type: str = typer.Option(..., "--type", "-t", help="The entity type"), + _print: CommandOptions._print = True, ): """ Retrieve entity type @@ -100,4 +122,7 @@ def get( client = ctx.obj["client"] r = client.get("api/v1/catalog/definitions/" + entity_type) - print_json(data=r) + if _print: + print_json(data=r) + else: + return r diff --git a/cortexapps_cli/commands/groups.py 
b/cortexapps_cli/commands/groups.py index 0f1a4a7..31a87db 100644 --- a/cortexapps_cli/commands/groups.py +++ b/cortexapps_cli/commands/groups.py @@ -1,6 +1,7 @@ import json from rich import print_json import typer +from typing_extensions import Annotated app = typer.Typer(help="Groups commands", no_args_is_help=True) @@ -32,7 +33,8 @@ def get( def add( ctx: typer.Context, tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), - groups: str = typer.Option(..., "--groups", "-g", help="Comma-delimited list of groups to add to the entity") + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing keys to update; can be passed as stdin with -, example: -f-")] = None, + groups: str = typer.Option(None, "--groups", "-g", help="Comma-delimited list of groups to add to the entity") ): """ Add groups to entity. @@ -40,9 +42,18 @@ def add( client = ctx.obj["client"] - data = { - "groups": [{"tag": x.strip()} for x in groups.split(',')] - } + if file_input and groups: + raise typer.BadParameter("Only one of --table and --csv can be specified") + + if not file_input and not groups: + raise typer.BadParameter("Only one of --file-input or --groups is required") + + if file_input: + data = json.loads("".join([line for line in file_input])) + else: + data = { + "groups": [{"tag": x.strip()} for x in groups.split(',')] + } r = client.put("api/v1/catalog/" + tag_or_id + "/groups", data=data) print_json(data=r) @@ -51,7 +62,8 @@ def add( def delete( ctx: typer.Context, tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), - groups: str = typer.Option(..., "--groups", "-g", help="Comma-delimited list of groups to delete from the entity") + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing keys to update; can be passed as stdin with -, 
example: -f-")] = None, + groups: str = typer.Option(None, "--groups", "-g", help="Comma-delimited list of groups to delete from the entity") ): """ Delete groups from entity. @@ -59,8 +71,17 @@ def delete( client = ctx.obj["client"] - data = { - "groups": [{"tag": x.strip()} for x in groups.split(',')] - } - + if file_input and groups: + raise typer.BadParameter("Only one of --table and --csv can be specified") + + if not file_input and not groups: + raise typer.BadParameter("Only one of --file-input or --groups is required") + + if file_input: + data = json.loads("".join([line for line in file_input])) + else: + data = { + "groups": [{"tag": x.strip()} for x in groups.split(',')] + } + r = client.delete("api/v1/catalog/" + tag_or_id + "/groups", data=data) diff --git a/cortexapps_cli/commands/initiatives.py b/cortexapps_cli/commands/initiatives.py new file mode 100644 index 0000000..aed431f --- /dev/null +++ b/cortexapps_cli/commands/initiatives.py @@ -0,0 +1,112 @@ +import json +from rich import print_json +import typer +from typing_extensions import Annotated +from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.command_options import ListCommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output + +app = typer.Typer( + help="Initiatives commands", + no_args_is_help=True +) + +@app.command() +def create( + ctx: typer.Context, + input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing YAML representation of scorecard, can be passed as stdin with -, example: -f-")] = None, + dry_run: bool = typer.Option(False, "--dry-run", "-d", help="When true, this endpoint only validates the descriptor contents and returns any errors or warnings"), +): + """ + Create or update a Scorecard using the descriptor YAML. The operation is determined by the existence of a Scorecard with the same tag as passed in the descriptor. 
+ """ + + client = ctx.obj["client"] + + params = { + "dryRun": dry_run + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + client.post("api/v1/scorecards/descriptor", params=params, data=input.read(), content_type="application/yaml;charset=UTF-8") + +@app.command() +def delete( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="Unique Cortex ID for the initiative"), +): + """ + Delete initiative. API key must have the Edit Initiatives permission. + """ + + client = ctx.obj["client"] + + r = client.delete("api/v1/initiatives/" + cid) + +@app.command() +def list( + ctx: typer.Context, + include_drafts: bool = typer.Option(False, "--include-drafts", "-d", help="Whether scorecard in draft mode should be included"), + include_expired: bool = typer.Option(False, "--include-expired", "-e", help="Whether scorecard in draft mode should be included"), + _print: CommandOptions._print = True, + page: ListCommandOptions.page = None, + page_size: ListCommandOptions.page_size = 250, + table_output: ListCommandOptions.table_output = False, + csv_output: ListCommandOptions.csv_output = False, + columns: ListCommandOptions.columns = [], + filters: ListCommandOptions.filters = [], + sort: ListCommandOptions.sort = [], +): + """ + List initiatives + """ + + client = ctx.obj["client"] + + params = { + "page": page, + "pageSize": page_size, + "includeDrafts": include_drafts, + "includeExpired": include_expired + } + + # remove any params that are None + params = {k: v for k, v in params.items() if v is not None} + + if (table_output or csv_output) and not ctx.params.get('columns'): + ctx.params['columns'] = [ + "CId=cid", + "Name=name", + "Description=description", + "TargetDate=targetDate", + "ScorecardTag=scorecardtag", + "ScorecardName=scorecardName", + "IsDraft=isDraft", + ] + + if page is None: + r = client.fetch("api/v1/initiatives", params=params) + else: + r = client.get("api/v1/initiatives", 
params=params) + + if _print: + data = r + print_output_with_context(ctx, data) + else: + return(r) + +@app.command() +def get( + ctx: typer.Context, + cid: str = typer.Option(..., "--cid", "-c", help="Unique Cortex ID for the initiative"), +): + """ + Get initiative. API key must have the View Initiatives permission. + """ + + client = ctx.obj["client"] + + r = client.get("api/v1/initiatives/" + cid) + print_json(data=r) diff --git a/cortexapps_cli/commands/integrations_commands/aws.py b/cortexapps_cli/commands/integrations_commands/aws.py index a3aba2e..4f94a29 100644 --- a/cortexapps_cli/commands/integrations_commands/aws.py +++ b/cortexapps_cli/commands/integrations_commands/aws.py @@ -92,8 +92,8 @@ def get( r = client.get("api/v1/aws/configurations/" + accountId) print_json(data=r) -@app.command() -def get_all( +@app.command("list") +def aws_list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/azure_devops.py b/cortexapps_cli/commands/integrations_commands/azure_devops.py index 5134acd..510e9ae 100644 --- a/cortexapps_cli/commands/integrations_commands/azure_devops.py +++ b/cortexapps_cli/commands/integrations_commands/azure_devops.py @@ -100,7 +100,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/azure_resources.py b/cortexapps_cli/commands/integrations_commands/azure_resources.py index c023bb3..47021eb 100644 --- a/cortexapps_cli/commands/integrations_commands/azure_resources.py +++ b/cortexapps_cli/commands/integrations_commands/azure_resources.py @@ -125,8 +125,8 @@ def get( r = client.get("api/v1/azure-resources/configuration/" + alias) print_json(data=r) -@app.command() -def get_all( +@app.command("list") +def azure_resources_list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/circleci.py b/cortexapps_cli/commands/integrations_commands/circleci.py index 
3a85095..48ca1f0 100644 --- a/cortexapps_cli/commands/integrations_commands/circleci.py +++ b/cortexapps_cli/commands/integrations_commands/circleci.py @@ -96,7 +96,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/coralogix.py b/cortexapps_cli/commands/integrations_commands/coralogix.py index 124cd11..e23f659 100644 --- a/cortexapps_cli/commands/integrations_commands/coralogix.py +++ b/cortexapps_cli/commands/integrations_commands/coralogix.py @@ -105,7 +105,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/datadog.py b/cortexapps_cli/commands/integrations_commands/datadog.py index 105f595..2ebea20 100644 --- a/cortexapps_cli/commands/integrations_commands/datadog.py +++ b/cortexapps_cli/commands/integrations_commands/datadog.py @@ -104,7 +104,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/github.py b/cortexapps_cli/commands/integrations_commands/github.py index ab7ab83..da2a4ea 100644 --- a/cortexapps_cli/commands/integrations_commands/github.py +++ b/cortexapps_cli/commands/integrations_commands/github.py @@ -96,7 +96,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/gitlab.py b/cortexapps_cli/commands/integrations_commands/gitlab.py index 2e3da64..ba7408c 100644 --- a/cortexapps_cli/commands/integrations_commands/gitlab.py +++ b/cortexapps_cli/commands/integrations_commands/gitlab.py @@ -96,7 +96,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/incidentio.py b/cortexapps_cli/commands/integrations_commands/incidentio.py 
index ad36b1e..4f13227 100644 --- a/cortexapps_cli/commands/integrations_commands/incidentio.py +++ b/cortexapps_cli/commands/integrations_commands/incidentio.py @@ -96,7 +96,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/launchdarkly.py b/cortexapps_cli/commands/integrations_commands/launchdarkly.py index 2711590..eefca99 100644 --- a/cortexapps_cli/commands/integrations_commands/launchdarkly.py +++ b/cortexapps_cli/commands/integrations_commands/launchdarkly.py @@ -96,7 +96,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/newrelic.py b/cortexapps_cli/commands/integrations_commands/newrelic.py index 9a59ed2..eed0a4a 100644 --- a/cortexapps_cli/commands/integrations_commands/newrelic.py +++ b/cortexapps_cli/commands/integrations_commands/newrelic.py @@ -96,7 +96,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/pagerduty.py b/cortexapps_cli/commands/integrations_commands/pagerduty.py index 64993b3..1774206 100644 --- a/cortexapps_cli/commands/integrations_commands/pagerduty.py +++ b/cortexapps_cli/commands/integrations_commands/pagerduty.py @@ -96,7 +96,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/prometheus.py b/cortexapps_cli/commands/integrations_commands/prometheus.py index feb4685..9623b31 100644 --- a/cortexapps_cli/commands/integrations_commands/prometheus.py +++ b/cortexapps_cli/commands/integrations_commands/prometheus.py @@ -96,7 +96,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/integrations_commands/sonarqube.py 
b/cortexapps_cli/commands/integrations_commands/sonarqube.py index b9260d4..81b160a 100644 --- a/cortexapps_cli/commands/integrations_commands/sonarqube.py +++ b/cortexapps_cli/commands/integrations_commands/sonarqube.py @@ -96,7 +96,7 @@ def get( print_json(data=r) @app.command() -def get_all( +def list( ctx: typer.Context, ): """ diff --git a/cortexapps_cli/commands/packages.py b/cortexapps_cli/commands/packages.py index 9090772..56a9ccd 100644 --- a/cortexapps_cli/commands/packages.py +++ b/cortexapps_cli/commands/packages.py @@ -24,8 +24,6 @@ def list( ctx: typer.Context, tag_or_id: str = typer.Option(..., "--tag-or-id", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), - page: ListCommandOptions.page = None, - page_size: ListCommandOptions.page_size = 250, table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], @@ -38,14 +36,6 @@ def list( client = ctx.obj["client"] - params = { - "page": page, - "pageSize": page_size - } - - # remove any params that are None - params = {k: v for k, v in params.items() if v is not None} - if (table_output or csv_output) and not ctx.params.get('columns'): ctx.params['columns'] = [ "Id=id", @@ -55,13 +45,8 @@ def list( "DateCreated=dateCreated", ] - if page is None: - # if page is not specified, we want to fetch all pages - r = client.fetch("api/v1/catalog/" + tag_or_id + "/packages", params=params) - else: - # if page is specified, we want to fetch only that page - r = client.get("api/v1/catalog/" + tag_or_id + "/packages", params=params) - + # NOTE: packages list is not paginated, so no if-else that includes client.fetch. 
+ r = client.get("api/v1/catalog/" + tag_or_id + "/packages") print_output_with_context(ctx, r) @app.command() diff --git a/cortexapps_cli/commands/scorecards.py b/cortexapps_cli/commands/scorecards.py index a592239..9ef9897 100644 --- a/cortexapps_cli/commands/scorecards.py +++ b/cortexapps_cli/commands/scorecards.py @@ -17,7 +17,7 @@ @app.command() def create( ctx: typer.Context, - input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing YAML representation of scorecard, can be passed as stdin with -, example: -f-")] = None, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing YAML representation of scorecard, can be passed as stdin with -, example: -f-")] = None, dry_run: bool = typer.Option(False, "--dry-run", "-d", help="When true, this endpoint only validates the descriptor contents and returns any errors or warnings"), ): """ @@ -33,7 +33,7 @@ def create( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - client.post("api/v1/scorecards/descriptor", params=params, data=input.read(), content_type="application/yaml;charset=UTF-8") + client.post("api/v1/scorecards/descriptor", params=params, data=file_input.read(), content_type="application/yaml;charset=UTF-8") @app.command() def delete( diff --git a/data/catalog/ach-payments-nacha.yaml b/data/catalog/ach-payments-nacha.yaml deleted file mode 100644 index 9c0eb70..0000000 --- a/data/catalog/ach-payments-nacha.yaml +++ /dev/null @@ -1,47 +0,0 @@ -info: - description: null - title: ACH payments NACHA - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (ach-payments-nacha) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 02/Mar/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: 
Developer staging environment for ach-payments-nacha - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: ach-payments-nacha - x-cortex-groups: - - public-api-test - - public-api-test-group-1 - x-cortex-type: service - x-cortex-slack: - channels: - - name: ach-payments - x-cortex-owners: - - name: payments-team - type: GROUP - provider: CORTEX -openapi: 3.0.0 diff --git a/data/catalog/admin-customer-support.yaml b/data/catalog/admin-customer-support.yaml deleted file mode 100644 index 22b0113..0000000 --- a/data/catalog/admin-customer-support.yaml +++ /dev/null @@ -1,40 +0,0 @@ -info: - description: null - title: Admin customer support - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (admin-customer-support) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 27/Oct/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for admin-customer-support - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application 
- - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: admin-customer-support - x-cortex-groups: - - public-api-test - - public-api-test-group-2 - x-cortex-type: service -openapi: 3.0.0 diff --git a/data/catalog/alerting-stock-service.yaml b/data/catalog/alerting-stock-service.yaml deleted file mode 100644 index 0648773..0000000 --- a/data/catalog/alerting-stock-service.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Alerting stock service - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (alerting-stock-service) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 19/Jan/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for alerting-stock-service - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: alerting-stock-service - x-cortex-groups: - - public-api-test - x-cortex-type: service -openapi: 3.0.0 diff --git a/data/catalog/api-australia.yaml 
b/data/catalog/api-australia.yaml deleted file mode 100644 index c6c7703..0000000 --- a/data/catalog/api-australia.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Australia - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Australia) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-australia - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-back-profiles.yaml b/data/catalog/api-back-profiles.yaml deleted file mode 100644 index 4ea8612..0000000 --- a/data/catalog/api-back-profiles.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Back profiles - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Back profiles) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-back-profiles - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-bi-systems.yaml b/data/catalog/api-bi-systems.yaml deleted file mode 100644 index 7debf1b..0000000 --- a/data/catalog/api-bi-systems.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: BI systems - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-BI systems) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-bi-systems - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-analytics.yaml b/data/catalog/api-business-analytics.yaml deleted file mode 100644 index a3d53f5..0000000 --- a/data/catalog/api-business-analytics.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Analytics - version: 1.0.0 - x-cortex-links: - - 
name: Error Logs (api-Business Analytics) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-analytics - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-development.yaml b/data/catalog/api-business-development.yaml deleted file mode 100644 index e3f1aa7..0000000 --- a/data/catalog/api-business-development.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Development - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Development) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-development - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-innovation.yaml b/data/catalog/api-business-innovation.yaml deleted file mode 100644 index e9b9f58..0000000 --- a/data/catalog/api-business-innovation.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Innovation - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Innovation) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-innovation - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-model.yaml b/data/catalog/api-business-model.yaml deleted file mode 100644 index 96507de..0000000 --- a/data/catalog/api-business-model.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Model - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Model) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - 
prod - type: dashboard - x-cortex-tag: api-business-model - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-operations.yaml b/data/catalog/api-business-operations.yaml deleted file mode 100644 index 1287519..0000000 --- a/data/catalog/api-business-operations.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Operations - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Operations) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-operations - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-plan.yaml b/data/catalog/api-business-plan.yaml deleted file mode 100644 index b13df15..0000000 --- a/data/catalog/api-business-plan.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Plan - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Plan) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-plan - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-process.yaml b/data/catalog/api-business-process.yaml deleted file mode 100644 index 7f74f11..0000000 --- a/data/catalog/api-business-process.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Process - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Process) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-process - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git 
a/data/catalog/api-business-systems.yaml b/data/catalog/api-business-systems.yaml deleted file mode 100644 index f0611ab..0000000 --- a/data/catalog/api-business-systems.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Systems - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Systems) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-systems - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-technology.yaml b/data/catalog/api-business-technology.yaml deleted file mode 100644 index 2890492..0000000 --- a/data/catalog/api-business-technology.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Technology - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Technology) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-technology - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-business-transformation.yaml b/data/catalog/api-business-transformation.yaml deleted file mode 100644 index 0173371..0000000 --- a/data/catalog/api-business-transformation.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Business Transformation - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Business Transformation) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-business-transformation - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-caribbean.yaml b/data/catalog/api-caribbean.yaml deleted file mode 100644 index 9b51121..0000000 
--- a/data/catalog/api-caribbean.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Caribbean - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Caribbean) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-caribbean - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-central-america.yaml b/data/catalog/api-central-america.yaml deleted file mode 100644 index 972adcc..0000000 --- a/data/catalog/api-central-america.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Central America - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Central America) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-central-america - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-circle-cicd.yaml b/data/catalog/api-circle-cicd.yaml deleted file mode 100644 index d0443c7..0000000 --- a/data/catalog/api-circle-cicd.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Circle CICD - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Circle CICD) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-circle-cicd - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-code-search.yaml b/data/catalog/api-code-search.yaml deleted file mode 100644 index 67c376e..0000000 --- a/data/catalog/api-code-search.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Code search - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Code search) - type: logs - - name: Grafana Dashboard - prod - type: 
dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-codeR-earch - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-cold-storage.yaml b/data/catalog/api-cold-storage.yaml deleted file mode 100644 index 3d8f28f..0000000 --- a/data/catalog/api-cold-storage.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Cold storage - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Cold storage) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-cold-storage - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-credit-card.yaml b/data/catalog/api-credit-card.yaml deleted file mode 100644 index bed76a7..0000000 --- a/data/catalog/api-credit-card.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Credit card - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Credit card) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-credit-card - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-datacenter-monitors.yaml b/data/catalog/api-datacenter-monitors.yaml deleted file mode 100644 index f216a75..0000000 --- a/data/catalog/api-datacenter-monitors.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Datacenter monitors - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Datacenter monitors) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-datacenter-monitors - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git 
a/data/catalog/api-design-file-updates.yaml b/data/catalog/api-design-file-updates.yaml deleted file mode 100644 index 1b46a79..0000000 --- a/data/catalog/api-design-file-updates.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Design file updates - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Design file updates) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-design-file-updates - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-dev-pros.yaml b/data/catalog/api-dev-pros.yaml deleted file mode 100644 index 7000594..0000000 --- a/data/catalog/api-dev-pros.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Dev pros - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Dev pros) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-dev-pros - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-europe.yaml b/data/catalog/api-europe.yaml deleted file mode 100644 index 3e28132..0000000 --- a/data/catalog/api-europe.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Europe - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Europe) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-Europe - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-feature-services.yaml b/data/catalog/api-feature-services.yaml deleted file mode 100644 index 9037c86..0000000 --- a/data/catalog/api-feature-services.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Feature services - 
version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Feature services) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-feature-services - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-management.yaml b/data/catalog/api-infrastructure-management.yaml deleted file mode 100644 index 834da2c..0000000 --- a/data/catalog/api-infrastructure-management.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Management - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Management) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-management - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-monitoring.yaml b/data/catalog/api-infrastructure-monitoring.yaml deleted file mode 100644 index 52b6049..0000000 --- a/data/catalog/api-infrastructure-monitoring.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Monitoring - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Monitoring) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-monitoring - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-planning.yaml b/data/catalog/api-infrastructure-planning.yaml deleted file mode 100644 index dedde29..0000000 --- a/data/catalog/api-infrastructure-planning.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Planning - version: 1.0.0 - x-cortex-links: - - 
name: Error Logs (api-Infrastructure Planning) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-planning - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-security.yaml b/data/catalog/api-infrastructure-security.yaml deleted file mode 100644 index 4525ea0..0000000 --- a/data/catalog/api-infrastructure-security.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Security - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Security) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-security - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-services.yaml b/data/catalog/api-infrastructure-services.yaml deleted file mode 100644 index 913968f..0000000 --- a/data/catalog/api-infrastructure-services.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Services - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Services) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-services - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-infrastructure-strategy.yaml b/data/catalog/api-infrastructure-strategy.yaml deleted file mode 100644 index baf8617..0000000 --- a/data/catalog/api-infrastructure-strategy.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Infrastructure Strategy - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Infrastructure Strategy) - 
type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-infrastructure-strategy - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-na-west.yaml b/data/catalog/api-na-west.yaml deleted file mode 100644 index be97d1d..0000000 --- a/data/catalog/api-na-west.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: NA west - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-NA west) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-na-west - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-network-telemetry.yaml b/data/catalog/api-network-telemetry.yaml deleted file mode 100644 index 88ffb3e..0000000 --- a/data/catalog/api-network-telemetry.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Network telemetry - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Network telemetry) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-network-telemetry - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-hardware.yaml b/data/catalog/api-networking-hardware.yaml deleted file mode 100644 index 4a204fa..0000000 --- a/data/catalog/api-networking-hardware.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Hardware - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Hardware) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-hardware - x-cortex-type: api - 
x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-infrastructure.yaml b/data/catalog/api-networking-infrastructure.yaml deleted file mode 100644 index 29f290a..0000000 --- a/data/catalog/api-networking-infrastructure.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Infrastructure - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Infrastructure) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-infrastructure - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-protocol.yaml b/data/catalog/api-networking-protocol.yaml deleted file mode 100644 index 864a895..0000000 --- a/data/catalog/api-networking-protocol.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Protocol - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Protocol) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-protocol - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-services.yaml b/data/catalog/api-networking-services.yaml deleted file mode 100644 index 1ffba74..0000000 --- a/data/catalog/api-networking-services.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Services - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Services) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-services - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff 
--git a/data/catalog/api-networking-software.yaml b/data/catalog/api-networking-software.yaml deleted file mode 100644 index f631896..0000000 --- a/data/catalog/api-networking-software.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Software - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Software) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-software - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-networking-solutions.yaml b/data/catalog/api-networking-solutions.yaml deleted file mode 100644 index d00d388..0000000 --- a/data/catalog/api-networking-solutions.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Networking Solutions - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Networking Solutions) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-networking-solutions - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-oceania.yaml b/data/catalog/api-oceania.yaml deleted file mode 100644 index ae90f47..0000000 --- a/data/catalog/api-oceania.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Oceania - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Oceania) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-Oceania - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-polar-regions.yaml b/data/catalog/api-polar-regions.yaml deleted file mode 100644 index 839c102..0000000 --- a/data/catalog/api-polar-regions.yaml +++ /dev/null 
@@ -1,17 +0,0 @@ -info: - description: "" - title: Polar Regions - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Polar Regions) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-polar-regions - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-premium-logins.yaml b/data/catalog/api-premium-logins.yaml deleted file mode 100644 index 8b10f39..0000000 --- a/data/catalog/api-premium-logins.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Premium logins - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Premium logins) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-premium-logins - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-profile-integrations.yaml b/data/catalog/api-profile-integrations.yaml deleted file mode 100644 index b5d3cf8..0000000 --- a/data/catalog/api-profile-integrations.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: profile integrations - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-profile integrations) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-profile-integrations - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-quarterly-reports.yaml b/data/catalog/api-quarterly-reports.yaml deleted file mode 100644 index 49f427d..0000000 --- a/data/catalog/api-quarterly-reports.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Quarterly reports - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Quarterly reports) - type: logs - - 
name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-quarterly-reports - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-security-health-status.yaml b/data/catalog/api-security-health-status.yaml deleted file mode 100644 index 2224293..0000000 --- a/data/catalog/api-security-health-status.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Security health status - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Security health status) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-security-health-status - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-south-america-region.yaml b/data/catalog/api-south-america-region.yaml deleted file mode 100644 index 5529539..0000000 --- a/data/catalog/api-south-america-region.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: South America Region - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-South America Region) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-south-america-region - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-standards-and-compliance.yaml b/data/catalog/api-standards-and-compliance.yaml deleted file mode 100644 index a3dbfeb..0000000 --- a/data/catalog/api-standards-and-compliance.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Standards and compliance - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Standards and compliance) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus 
Dashboard - prod - type: dashboard - x-cortex-tag: api-standards-and-compliance - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-transactions-api.yaml b/data/catalog/api-transactions-api.yaml deleted file mode 100644 index b5f140c..0000000 --- a/data/catalog/api-transactions-api.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Transaction API - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-Transaction API) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-transaction-api - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/api-user-services.yaml b/data/catalog/api-user-services.yaml deleted file mode 100644 index 17b7c68..0000000 --- a/data/catalog/api-user-services.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: User services - version: 1.0.0 - x-cortex-links: - - name: Error Logs (api-User services) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: api-user-services - x-cortex-type: api - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/auth-team.yaml b/data/catalog/auth-team.yaml deleted file mode 100644 index 0cf281b..0000000 --- a/data/catalog/auth-team.yaml +++ /dev/null @@ -1,27 +0,0 @@ -info: - description: null - title: Authentication Team - version: 1.0.0 - x-cortex-links: - - name: Authentication Team Onboarding Guide - type: onboarding - - name: Authentication Team Best Practices - type: best_practices - x-cortex-tag: auth-team - x-cortex-groups: - - public-api-test - x-cortex-team: - members: - - email: nikhil.unni@cortex.io - name: Nikhil Unni - notificationsEnabled: true - - email: aditya.bansal@cortex.io - 
name: Aditya Bansal - notificationsEnabled: true - - email: eyal.foni@cortex.io - name: Eyal Foni - notificationsEnabled: true - - email: jaroslaw.gaworecki@cortex.io - name: Jarosław Gaworecki - notificationsEnabled: true -openapi: 3.0.0 diff --git a/data/catalog/authentication.yaml b/data/catalog/authentication.yaml deleted file mode 100644 index 3ea113f..0000000 --- a/data/catalog/authentication.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: null - title: Authentication - version: 1.0.0 - x-cortex-links: - - name: Authentication Overview - type: documentation - - name: Authentication Specs - type: specs - - name: Authentication Bug bashes - type: bug_bashes - x-cortex-tag: authentication - x-cortex-type: domain - x-cortex-groups: - - public-api-test - x-cortex-children: - - tag: oauth2-identity-service - - tag: sso-integration -openapi: 3.0.0 diff --git a/data/catalog/autocomplete-parser.yaml b/data/catalog/autocomplete-parser.yaml deleted file mode 100644 index c67c638..0000000 --- a/data/catalog/autocomplete-parser.yaml +++ /dev/null @@ -1,38 +0,0 @@ -info: - description: null - title: Autocomplete parser - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (autocomplete-parser) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 27/Nov/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for autocomplete-parser - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - 
name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: autocomplete-parser - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/autocomplete.yaml b/data/catalog/autocomplete.yaml deleted file mode 100644 index f1938b9..0000000 --- a/data/catalog/autocomplete.yaml +++ /dev/null @@ -1,41 +0,0 @@ -info: - description: >- - **Autocomplete** is the service used by our [frontend](https://cortex.io) to autocomplete searches made by a user on our site. It utilizes the following frameworks: - * Elasticsearch - * redis - title: Autocomplete - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (autocomplete) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 18/Jun/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for autocomplete - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: autocomplete - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/checkout.yaml b/data/catalog/checkout.yaml deleted file mode 100644 index 
7d270a0..0000000 --- a/data/catalog/checkout.yaml +++ /dev/null @@ -1,18 +0,0 @@ -info: - description: null - title: Checkout - version: 1.0.0 - x-cortex-links: - - name: Checkout Overview - type: documentation - - name: Checkout Specs - type: specs - - name: Checkout Bug bashes - type: bug_bashes - x-cortex-tag: checkout - x-cortex-type: domain - x-cortex-children: - - tag: payments - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/credit-card-transaction-processor.yaml b/data/catalog/credit-card-transaction-processor.yaml deleted file mode 100644 index 099fb13..0000000 --- a/data/catalog/credit-card-transaction-processor.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Credit card transaction processor - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (credit-card-transaction-processor) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 21/Sep/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for credit-card-transaction-processor - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: credit-card-transaction-processor - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 
3.0.0 diff --git a/data/catalog/data-calculation-batch-job.yaml b/data/catalog/data-calculation-batch-job.yaml deleted file mode 100644 index 08cfb3b..0000000 --- a/data/catalog/data-calculation-batch-job.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Data calculator batch job - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (data-calculator-batch-job) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 16/Jun/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for data-calculator-batch-job - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: data-calculator-batch-job - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/data-ingestion.yaml b/data/catalog/data-ingestion.yaml deleted file mode 100644 index cb4e6eb..0000000 --- a/data/catalog/data-ingestion.yaml +++ /dev/null @@ -1,23 +0,0 @@ -info: - description: null - title: Data Ingestion - version: 1.0.0 - x-cortex-links: - - name: Data Ingestion Overview - type: documentation - - name: Data Ingestion Specs - type: specs - - name: Data Ingestion Bug bashes - type: bug_bashes - x-cortex-tag: data-ingestion - 
x-cortex-type: domain - x-cortex-children: - - tag: data-calculator-batch-job - - tag: inventory-scraper - - tag: image-recognition-pipeline - - tag: retail-image-labeler - - tag: image-store-bucket - - tag: orders-events - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/data.yaml b/data/catalog/data.yaml deleted file mode 100644 index 6ed4b6c..0000000 --- a/data/catalog/data.yaml +++ /dev/null @@ -1,31 +0,0 @@ -info: - description: null - title: Data - version: 1.0.0 - x-cortex-links: - - name: Data Onboarding Guide - type: onboarding - - name: Data Best Practices - type: best_practices - x-cortex-tag: data - x-cortex-type: team - x-cortex-team: - groups: - - name: okta-data - provider: OKTA - members: - - email: nikhil.unni@cortex.io - name: Nikhil Unni - notificationsEnabled: true - - email: jennie.chen@cortex.io - name: Jennie Chen - notificationsEnabled: true - - email: eyal.foni@cortex.io - name: Eyal Foni - notificationsEnabled: true - - email: mikolaj.stepniewski@cortex.io - name: Mikołaj Stępniewski - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/engineering-dev-cluster.yaml b/data/catalog/engineering-dev-cluster.yaml deleted file mode 100644 index eb0c2d7..0000000 --- a/data/catalog/engineering-dev-cluster.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: engineering-dev-cluster - version: 1.0.0 - x-cortex-links: - - name: Error Logs (engineering-dev-cluster) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: engineering-dev-cluster - x-cortex-type: k8s-cluster - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/experience.yaml b/data/catalog/experience.yaml deleted file mode 100644 index 3b84222..0000000 --- a/data/catalog/experience.yaml +++ /dev/null @@ -1,26 +0,0 @@ -info: - description: null - title: 
Experience - version: 1.0.0 - x-cortex-links: - - name: Experience Overview - type: documentation - - name: Experience Specs - type: specs - - name: Experience Bug bashes - type: bug_bashes - x-cortex-tag: experience - x-cortex-type: domain - x-cortex-owners: - - name: search-experience - type: GROUP - provider: CORTEX - x-cortex-children: - - tag: autocomplete-parser - - tag: autocomplete - - tag: results-cacher - - tag: result-cacher-postgres - - tag: query-analyzer - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/feed-calculator-job.yaml b/data/catalog/feed-calculator-job.yaml deleted file mode 100644 index 9e2ccba..0000000 --- a/data/catalog/feed-calculator-job.yaml +++ /dev/null @@ -1,38 +0,0 @@ -info: - description: null - title: Feed calculator job - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (feed-calculator-job) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 17/Nov/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for feed-calculator-job - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: feed-calculator-job - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git 
a/data/catalog/fraud-analyzer.yaml b/data/catalog/fraud-analyzer.yaml deleted file mode 100644 index 919ad1a..0000000 --- a/data/catalog/fraud-analyzer.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Fraud analyzer - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (fraud-analyzer) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 06/Feb/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for fraud-analyzer - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: fraud-analyzer - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/identity.yaml b/data/catalog/identity.yaml deleted file mode 100644 index 91ba56a..0000000 --- a/data/catalog/identity.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: null - title: Identity - version: 1.0.0 - x-cortex-links: - - name: Identity Overview - type: documentation - - name: Identity Specs - type: specs - - name: Identity Bug bashes - type: bug_bashes - x-cortex-tag: identity - x-cortex-type: domain - x-cortex-children: - - tag: profiles - - tag: authentication - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git 
a/data/catalog/image-recognition-pipeline.yaml b/data/catalog/image-recognition-pipeline.yaml deleted file mode 100644 index 40b0b1a..0000000 --- a/data/catalog/image-recognition-pipeline.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Image recognition pipeline - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (image-recognition-pipeline) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 10/Apr/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for image-recognition-pipeline - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: image-recognition-pipeline - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/image-store-bucket.yaml b/data/catalog/image-store-bucket.yaml deleted file mode 100644 index 271b5e9..0000000 --- a/data/catalog/image-store-bucket.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: null - title: Image Bucket Store - version: 1.0.0 - x-cortex-links: - - name: Error Logs (image-store-bucket) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: image-store-bucket 
- x-cortex-type: s3 - x-cortex-definition: - containsPii: true - engineVersion: 5.7.mysql_aurora - region: us-east-1 - versioned: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/import-engine.yaml b/data/catalog/import-engine.yaml deleted file mode 100644 index 654664b..0000000 --- a/data/catalog/import-engine.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - description: "" - title: Import Engine - version: 1.0.0 - x-cortex-links: - - name: Error Logs (import-engine) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: import-engine - x-cortex-type: component - x-cortex-definition: {} - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/inventory-scraper.yaml b/data/catalog/inventory-scraper.yaml deleted file mode 100644 index bdce69f..0000000 --- a/data/catalog/inventory-scraper.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Inventory scraper - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (inventory-scraper) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 23/Mar/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for inventory-scraper - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - 
name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: inventory-scraper - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/inventory-team.yaml b/data/catalog/inventory-team.yaml deleted file mode 100644 index d606c33..0000000 --- a/data/catalog/inventory-team.yaml +++ /dev/null @@ -1,33 +0,0 @@ -info: - description: null - title: Inventory Team - version: 1.0.0 - x-cortex-links: - - name: Inventory Team Onboarding Guide - type: onboarding - - name: Inventory Team Best Practices - type: best_practices - x-cortex-tag: inventory-team - x-cortex-team: - groups: - - name: okta-inventory-team - provider: OKTA - members: - - email: greg.pett@cortex.io - name: Greg Pett - notificationsEnabled: true - - email: andrew.si@cortex.io - name: Andrew Si - notificationsEnabled: true - - email: tyler.ackerson@cortex.io - name: Tyler Ackerson - notificationsEnabled: true - - email: wojciech.garncarz@cortex.io - name: Wojciech Garncarz - notificationsEnabled: true - - email: lukasz.blaszczyk@cortex.io - name: Lukasz Blaszczyk - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/inventory.yaml b/data/catalog/inventory.yaml deleted file mode 100644 index 876ebc4..0000000 --- a/data/catalog/inventory.yaml +++ /dev/null @@ -1,20 +0,0 @@ -info: - description: null - title: Inventory - version: 1.0.0 - x-cortex-links: - - name: Inventory Overview - type: documentation - - name: Inventory Specs - type: specs - - name: Inventory Bug bashes - type: bug_bashes - x-cortex-tag: inventory - x-cortex-type: domain - x-cortex-children: - - tag: robot-item-sorter - - tag: stock-level-analyzer - - tag: alerting-stock-service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/logistics-team.yaml b/data/catalog/logistics-team.yaml deleted file mode 100644 index 5a1a080..0000000 --- 
a/data/catalog/logistics-team.yaml +++ /dev/null @@ -1,24 +0,0 @@ -info: - description: null - title: Logistics Team - version: 1.0.0 - x-cortex-links: - - name: Logistics Team Onboarding Guide - type: onboarding - - name: Logistics Team Best Practices - type: best_practices - x-cortex-tag: logistics-team - x-cortex-team: - groups: - - name: okta-logistics-team - provider: OKTA - members: - - email: cristina.buenahora@cortex.io - name: Cristina Buenahora - notificationsEnabled: true - - email: lisa.tran@cortex.io - name: Lisa Tran - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/model-innovation-team.yaml b/data/catalog/model-innovation-team.yaml deleted file mode 100644 index 9e1e2ab..0000000 --- a/data/catalog/model-innovation-team.yaml +++ /dev/null @@ -1,27 +0,0 @@ -info: - description: null - title: Model Innovation Team - version: 1.0.0 - x-cortex-links: - - name: Model Innovation Team Onboarding Guide - type: onboarding - - name: Model Innovation Team Best Practices - type: best_practices - x-cortex-tag: model-innovation-team - x-cortex-team: - groups: - - name: okta-model-innovation-team - provider: OKTA - members: - - email: nikhil.unni@cortex.io - name: Nikhil Unni - notificationsEnabled: true - - email: eli.berg@cortex.io - name: Eli Berg - notificationsEnabled: true - - email: eyal.foni@cortex.io - name: Eyal Foni - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/new-item-fanout-service.yaml b/data/catalog/new-item-fanout-service.yaml deleted file mode 100644 index 1a7aba5..0000000 --- a/data/catalog/new-item-fanout-service.yaml +++ /dev/null @@ -1,42 +0,0 @@ -info: - description: null - title: New item fanout service - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (new-item-fanout-service) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: 
documentation - description: presented at Arch Review 20/May/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for new-item-fanout-service - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: new-item-fanout-service - x-cortex-type: service - x-cortex-groups: - - public-api-test - x-cortex-git: - github: - repository: my-org/my-repo -openapi: 3.0.0 diff --git a/data/catalog/oauth2-identity-service.yaml b/data/catalog/oauth2-identity-service.yaml deleted file mode 100644 index 3d332c4..0000000 --- a/data/catalog/oauth2-identity-service.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: OAuth2 identity service - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (oauth2-identity-service) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 23/Feb/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for oauth2-identity-service - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - 
name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: oauth2-identity-service - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/order-management.yaml b/data/catalog/order-management.yaml deleted file mode 100644 index 0ce4cb3..0000000 --- a/data/catalog/order-management.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: null - title: Order Management - version: 1.0.0 - x-cortex-links: - - name: Order Management Overview - type: documentation - - name: Order Management Specs - type: specs - - name: Order Management Bug bashes - type: bug_bashes - x-cortex-tag: order-management - x-cortex-type: domain - x-cortex-children: - - tag: warehousing - - tag: checkout - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/orders-events.yaml b/data/catalog/orders-events.yaml deleted file mode 100644 index 5436ffa..0000000 --- a/data/catalog/orders-events.yaml +++ /dev/null @@ -1,20 +0,0 @@ -info: - description: null - title: Orders Data Event Stream - version: 1.0.0 - x-cortex-links: - - name: Error Logs (orders-events) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: orders-events - x-cortex-type: kafka-topic - x-cortex-definition: - cluster: prod - serializationTool: grpc - topicName: eats-event - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/packaging-recommender.yaml b/data/catalog/packaging-recommender.yaml deleted file mode 100644 index 9426bf6..0000000 --- a/data/catalog/packaging-recommender.yaml 
+++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Packaging recommender - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (packaging-recommender) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 12/Feb/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for packaging-recommender - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: packaging-recommender - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/payments-team.yaml b/data/catalog/payments-team.yaml deleted file mode 100644 index c6ff7c4..0000000 --- a/data/catalog/payments-team.yaml +++ /dev/null @@ -1,17 +0,0 @@ -info: - title: Payments - x-cortex-links: - - name: Payments Onboarding Guide - type: onboarding - - name: Payments Best Practices - type: best_practices - x-cortex-tag: payments-team - x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.1 diff --git a/data/catalog/payments.yaml b/data/catalog/payments.yaml deleted file mode 100644 index f637003..0000000 --- 
a/data/catalog/payments.yaml +++ /dev/null @@ -1,22 +0,0 @@ -info: - description: null - title: Payments - version: 1.0.0 - x-cortex-links: - - name: Payments Overview - type: documentation - - name: Payments Specs - type: specs - - name: Payments Bug bashes - type: bug_bashes - x-cortex-tag: payments - x-cortex-type: domain - x-cortex-children: - - tag: transaction-store - - tag: credit-card-transaction-processor - - tag: ach-payments-nacha - - tag: fraud-analyzer - - tag: transaction-squid-proxy - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/profile-management.yaml b/data/catalog/profile-management.yaml deleted file mode 100644 index 22235bf..0000000 --- a/data/catalog/profile-management.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Profile management - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (profile-management) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 02/Nov/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for profile-management - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: profile-management - x-cortex-type: service - x-cortex-groups: - - public-api-test 
-openapi: 3.0.0 diff --git a/data/catalog/profile-pictures.yaml b/data/catalog/profile-pictures.yaml deleted file mode 100644 index b74ded7..0000000 --- a/data/catalog/profile-pictures.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: Public facing profile picture images in all sizes - title: Profile Pictures - version: 1.0.0 - x-cortex-links: - - name: Error Logs (profile-pictures) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: profile-pictures - x-cortex-type: s3 - x-cortex-definition: - containsPii: true - engineVersion: 5.7.mysql_aurora - region: us-east-1 - versioned: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/profile-team.yaml b/data/catalog/profile-team.yaml deleted file mode 100644 index 49ad18c..0000000 --- a/data/catalog/profile-team.yaml +++ /dev/null @@ -1,33 +0,0 @@ -info: - description: null - title: Profile Team - version: 1.0.0 - x-cortex-links: - - name: Profile Team Onboarding Guide - type: onboarding - - name: Profile Team Best Practices - type: best_practices - x-cortex-tag: profile-team - x-cortex-team: - groups: - - name: okta-profile-team - provider: OKTA - members: - - email: cristina.buenahora@cortex.io - name: Cristina Buenahora - notificationsEnabled: true - - email: david.barnes@cortex.io - name: David Barnes - notificationsEnabled: true - - email: hanna.vigil@cortex.io - name: Hanna Vigil - notificationsEnabled: true - - email: stormy.adams@cortex.io - name: Stormy Adams - notificationsEnabled: true - - email: igor.rog@cortex.io - name: Igor Rog - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/profiles.yaml b/data/catalog/profiles.yaml deleted file mode 100644 index 8e4d00c..0000000 --- a/data/catalog/profiles.yaml +++ /dev/null @@ -1,22 +0,0 @@ -info: - description: null - title: Profiles - version: 1.0.0 - x-cortex-links: - - name: 
Profiles Overview - type: documentation - - name: Profiles Specs - type: specs - - name: Profiles Bug bashes - type: bug_bashes - x-cortex-tag: profiles - x-cortex-type: domain - x-cortex-children: - - tag: user-profile-metadata-service - - tag: profile-management - - tag: profile-pictures - - tag: user-profile-metadata-service-bucket - - tag: admin-customer-support - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/query-analyzer.yaml b/data/catalog/query-analyzer.yaml deleted file mode 100644 index 08ec766..0000000 --- a/data/catalog/query-analyzer.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Query analyzer - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (query-analyzer) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 28/Aug/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for query-analyzer - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: query-analyzer - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/recommendation-engine-kafka.yaml b/data/catalog/recommendation-engine-kafka.yaml deleted file mode 100644 index 
65b1a76..0000000 --- a/data/catalog/recommendation-engine-kafka.yaml +++ /dev/null @@ -1,20 +0,0 @@ -info: - description: null - title: Recommendation Engine Kafka - version: 1.0.0 - x-cortex-links: - - name: Error Logs (recommendation-engine-kafka) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: recommendation-engine-kafka - x-cortex-type: kafka-topic - x-cortex-definition: - cluster: staging - serializationTool: avro - topicName: eats-event - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/recommendation-engine.yaml b/data/catalog/recommendation-engine.yaml deleted file mode 100644 index 062ed8b..0000000 --- a/data/catalog/recommendation-engine.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Recommendation engine - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (recommendation-engine) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 08/Aug/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for recommendation-engine - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: recommendation-engine - 
x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/recommendations.yaml b/data/catalog/recommendations.yaml deleted file mode 100644 index 92272b9..0000000 --- a/data/catalog/recommendations.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: null - title: Recommendations - version: 1.0.0 - x-cortex-links: - - name: Recommendations Overview - type: documentation - - name: Recommendations Specs - type: specs - - name: Recommendations Bug bashes - type: bug_bashes - x-cortex-tag: recommendations - x-cortex-type: domain - x-cortex-children: - - tag: recommendation-engine - - tag: recommendation-engine-kafka - - tag: feed-calculator-job - - tag: new-item-fanout-service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/result-cacher-postgres.yaml b/data/catalog/result-cacher-postgres.yaml deleted file mode 100644 index d0e12d2..0000000 --- a/data/catalog/result-cacher-postgres.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: null - title: Result Cacher Postgres - version: 1.0.0 - x-cortex-links: - - name: Error Logs (result-cacher-postgres) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: result-cacher-postgres - x-cortex-type: rds - x-cortex-definition: - containsPii: false - dbFamily: sqlite - region: us-east-2 - version: 10.1.7 - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/retail-image-labeler.yaml b/data/catalog/retail-image-labeler.yaml deleted file mode 100644 index 4cc17c9..0000000 --- a/data/catalog/retail-image-labeler.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Retail image labeler - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (retail-image-labeler) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - 
description: presented at Arch Review 04/Mar/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for retail-image-labeler - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: retail-image-labeler - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/returns-processor.yaml b/data/catalog/returns-processor.yaml deleted file mode 100644 index 4ee9a47..0000000 --- a/data/catalog/returns-processor.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Returns processor - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (returns-processor) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 01/Feb/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for returns-processor - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: 
Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: returns-processor - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/robot-item-sorter.yaml b/data/catalog/robot-item-sorter.yaml deleted file mode 100644 index dc14d26..0000000 --- a/data/catalog/robot-item-sorter.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Robot item sorter - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (robot-item-sorter) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 13/Jun/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for robot-item-sorter - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: robot-item-sorter - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/search-experience.yaml b/data/catalog/search-experience.yaml deleted file mode 100644 index 4bd16e0..0000000 --- 
a/data/catalog/search-experience.yaml +++ /dev/null @@ -1,29 +0,0 @@ -info: - title: Search Experience - x-cortex-links: - - name: Search Experience Onboarding Guide - type: onboarding - - name: Search Experience Best Practices - type: best_practices - x-cortex-tag: search-experience - x-cortex-type: team - x-cortex-team: - members: - - email: cristina.buenahora@cortex.io - name: Cristina Buenahora - notificationsEnabled: true - - email: david.barnes@cortex.io - name: David Barnes - notificationsEnabled: true - - email: hanna.vigil@cortex.io - name: Hanna Vigil - notificationsEnabled: true - - email: stormy.adams@cortex.io - name: Stormy Adams - notificationsEnabled: true - - email: igor.rog@cortex.io - name: Igor Rog - notificationsEnabled: true - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/search.yaml b/data/catalog/search.yaml deleted file mode 100644 index 64c5583..0000000 --- a/data/catalog/search.yaml +++ /dev/null @@ -1,20 +0,0 @@ -info: - description: null - title: Search - version: 1.0.0 - x-cortex-links: - - name: Search Overview - type: documentation - - name: Search Specs - type: specs - - name: Search Bug bashes - type: bug_bashes - x-cortex-tag: search - x-cortex-type: domain - x-cortex-children: - - tag: data-ingestion - - tag: experience - - tag: recommendations - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/shipping-integrations.yaml b/data/catalog/shipping-integrations.yaml deleted file mode 100644 index 1832843..0000000 --- a/data/catalog/shipping-integrations.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Shipping integrations - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (shipping-integrations) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 26/May/23 - - name: Grafana Dashboard - prod - type: dashboard 
- - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for shipping-integrations - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: shipping-integrations - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/sso-integration.yaml b/data/catalog/sso-integration.yaml deleted file mode 100644 index 33ab2f6..0000000 --- a/data/catalog/sso-integration.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: SSO integration - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (sso-integration) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 19/Mar/24 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for sso-integration - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring 
Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: sso-integration - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/stock-level-analyzer.yaml b/data/catalog/stock-level-analyzer.yaml deleted file mode 100644 index 98abefa..0000000 --- a/data/catalog/stock-level-analyzer.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Stock level analyzer - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (stock-level-analyzer) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 07/Jul/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for stock-level-analyzer - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: stock-level-analyzer - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/team-a.yaml b/data/catalog/team-a.yaml deleted file mode 100644 index 41187e3..0000000 --- a/data/catalog/team-a.yaml +++ /dev/null @@ -1,15 +0,0 @@ -info: - title: Team A - x-cortex-tag: team-a - 
x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: false - x-cortex-children: - - tag: team-b - x-cortex-groups: - - public-api-test - - letter-teams -openapi: 3.0.0 diff --git a/data/catalog/team-b.yaml b/data/catalog/team-b.yaml deleted file mode 100644 index 5fd54b9..0000000 --- a/data/catalog/team-b.yaml +++ /dev/null @@ -1,15 +0,0 @@ -info: - title: Team B - x-cortex-tag: team-b - x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: false - x-cortex-children: - - tag: team-c - x-cortex-groups: - - public-api-test - - letter-teams -openapi: 3.0.0 diff --git a/data/catalog/team-c.yaml b/data/catalog/team-c.yaml deleted file mode 100644 index d87bd70..0000000 --- a/data/catalog/team-c.yaml +++ /dev/null @@ -1,15 +0,0 @@ -info: - title: Team C - x-cortex-tag: team-c - x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: false - x-cortex-children: - - tag: team-d - x-cortex-groups: - - public-api-test - - letter-teams -openapi: 3.0.0 diff --git a/data/catalog/team-d.yaml b/data/catalog/team-d.yaml deleted file mode 100644 index d1024dc..0000000 --- a/data/catalog/team-d.yaml +++ /dev/null @@ -1,13 +0,0 @@ -info: - title: Team D - x-cortex-tag: team-d - x-cortex-type: team - x-cortex-team: - members: - - email: jeff.schnitter@cortex.io - name: Jeff Schnitter - notificationsEnabled: false - x-cortex-groups: - - public-api-test - - letter-teams -openapi: 3.0.0 diff --git a/data/catalog/transaction-store.yaml b/data/catalog/transaction-store.yaml deleted file mode 100644 index 2b617b1..0000000 --- a/data/catalog/transaction-store.yaml +++ /dev/null @@ -1,39 +0,0 @@ -info: - description: null - title: Transaction store - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (transaction-store) - type: logs - description: Access 
and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 01/Aug/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for transaction-store - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: transaction-store - x-cortex-type: service - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/transactions-squid-proxy.yaml b/data/catalog/transactions-squid-proxy.yaml deleted file mode 100644 index 13d8fda..0000000 --- a/data/catalog/transactions-squid-proxy.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: Production squid proxy used to route transaction traffic - title: Transaction Squid Proxy - version: 1.0.0 - x-cortex-links: - - name: Error Logs (transaction-squid-proxy) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: transaction-squid-proxy - x-cortex-type: squid-proxy - x-cortex-definition: - ip: 206.61.17.15 - vpc: us-east-2 - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/user-profile-metadata-service-bucket.yaml b/data/catalog/user-profile-metadata-service-bucket.yaml deleted file mode 100644 index 5e68a9b..0000000 --- 
a/data/catalog/user-profile-metadata-service-bucket.yaml +++ /dev/null @@ -1,21 +0,0 @@ -info: - description: null - title: User Profile Metadata Service Bucket - version: 1.0.0 - x-cortex-links: - - name: Error Logs (user-profile-metadata-service-bucket) - type: logs - - name: Grafana Dashboard - prod - type: dashboard - - name: Prometheus Dashboard - prod - type: dashboard - x-cortex-tag: user-profile-metadata-service-bucket - x-cortex-type: s3 - x-cortex-definition: - containsPii: false - engineVersion: 5.3.mysql_aurora - region: us-east-2 - versioned: false - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/user-profile-metadata-service.yaml b/data/catalog/user-profile-metadata-service.yaml deleted file mode 100644 index cac3714..0000000 --- a/data/catalog/user-profile-metadata-service.yaml +++ /dev/null @@ -1,41 +0,0 @@ -info: - description: null - title: User profile metadata service - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (user-profile-metadata-service) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 23/Aug/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for user-profile-metadata-service - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: 
Infrastructure configuration and settings - x-cortex-tag: user-profile-metadata-service - x-cortex-type: service - x-cortex-groups: - - public-api-test - x-cortex-custom-metadata: - hasSecurityScans: true -openapi: 3.0.0 diff --git a/data/catalog/usps-api-client.yaml b/data/catalog/usps-api-client.yaml deleted file mode 100644 index 0b1e85c..0000000 --- a/data/catalog/usps-api-client.yaml +++ /dev/null @@ -1,38 +0,0 @@ -info: - description: null - title: USPS API client - version: 1.0.0 - x-cortex-links: - - name: Load Balancer Logs (usps-api-client) - type: logs - description: Access and error logs for the nginx load balancer - - name: Design Document (Architecture) - type: documentation - description: presented at Arch Review 02/Jun/23 - - name: Grafana Dashboard - prod - type: dashboard - - name: Grafana Dashboard - staging - type: dashboard - description: Developer staging environment for usps-api-client - - name: Production logs - type: logs - description: Datadog query for logs from all prod hosts - - name: Monitoring Dashboard - prod - type: dashboard - description: System status and performance metrics - - name: User Documentation - type: documentation - description: User documentation for the application - - name: Deployment History - type: logs - description: Deployment history for April 2024 - - name: Error Monitoring Dashboard - type: dashboard - description: Error tracking and resolution dashboard - - name: Infrastructure Settings - type: settings - description: Infrastructure configuration and settings - x-cortex-tag: usps-api-client - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/catalog/warehousing.yaml b/data/catalog/warehousing.yaml deleted file mode 100644 index 074aca3..0000000 --- a/data/catalog/warehousing.yaml +++ /dev/null @@ -1,19 +0,0 @@ -info: - description: null - title: Warehousing - version: 1.0.0 - x-cortex-links: - - name: Warehousing Overview - type: documentation - - name: Warehousing Specs - type: specs - 
- name: Warehousing Bug bashes - type: bug_bashes - x-cortex-tag: warehousing - x-cortex-type: domain - x-cortex-children: - - tag: inventory - - tag: fulfillment - x-cortex-groups: - - public-api-test -openapi: 3.0.0 diff --git a/data/resource-definitions/api.json b/data/resource-definitions/api.json deleted file mode 100644 index 369f0d7..0000000 --- a/data/resource-definitions/api.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "type": "api", - "source": "CUSTOM", - "name": "API", - "description": "A Cortex API method.", - "schema": { - "type": "object", - "required": [], - "properties": {} - } -} diff --git a/data/resource-definitions/component.json b/data/resource-definitions/component.json deleted file mode 100644 index bed7e7f..0000000 --- a/data/resource-definitions/component.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "type": "component", - "source": "CUSTOM", - "name": "Component", - "description": "Used for public-api-test data.", - "schema": { - "type": "object" - } -} diff --git a/data/resource-definitions/k8s-cluster.json b/data/resource-definitions/k8s-cluster.json deleted file mode 100644 index 3c0dccc..0000000 --- a/data/resource-definitions/k8s-cluster.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "type": "k8s-cluster", - "source": "CUSTOM", - "name": "K8s Cluster", - "description": null, - "schema": { - "type": "object" - } -} diff --git a/data/resource-definitions/kafka-topic.json b/data/resource-definitions/kafka-topic.json deleted file mode 100644 index 68252cf..0000000 --- a/data/resource-definitions/kafka-topic.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "type": "kafka-topic", - "source": "CUSTOM", - "name": "Kafka Topic", - "description": null, - "schema": { - "type": "object", - "required": [ - "topicName", - "cluster", - "serializationTool" - ], - "properties": { - "cluster": { - "type": "string" - }, - "topicName": { - "type": "string" - }, - "serializationTool": { - "type": "string" - } - } - } -} diff --git a/data/resource-definitions/squid-proxy.json 
b/data/resource-definitions/squid-proxy.json deleted file mode 100644 index 45fca39..0000000 --- a/data/resource-definitions/squid-proxy.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "type": "squid-proxy", - "source": "CUSTOM", - "name": "Squid Proxy", - "description": null, - "schema": { - "type": "object", - "required": [ - "ip", - "vpc" - ], - "properties": { - "ip": { - "type": "string" - }, - "vpc": { - "type": "string" - }, - "resources": { - "type": "string" - } - } - } -} diff --git a/data/run-time/archive-entity.yaml b/data/run-time/archive-entity.yaml index 03c76b5..c35a7f8 100644 --- a/data/run-time/archive-entity.yaml +++ b/data/run-time/archive-entity.yaml @@ -3,7 +3,6 @@ info: title: Archive Entity description: Entity that will be created and then archived to test catalog archive entity x-cortex-tag: archive-entity - x-cortex-type: component + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test diff --git a/data/run-time/create-entity-type-empty-schema.json b/data/run-time/create-entity-type-empty-schema.json index e6521c1..f0dd863 100644 --- a/data/run-time/create-entity-type-empty-schema.json +++ b/data/run-time/create-entity-type-empty-schema.json @@ -1,6 +1,6 @@ { - "description": "This is a test resource definition.", - "name": "Public API Type With Empty Schema", + "description": "This is a test entity type definition.", + "name": "CLI Test With Empty Schema", "schema": {}, - "type": "public-api-type-empty-schema" + "type": "cli-test-empty-schema" } diff --git a/data/run-time/create-entity.yaml b/data/run-time/create-entity.yaml index a5d0a96..38c7427 100644 --- a/data/run-time/create-entity.yaml +++ b/data/run-time/create-entity.yaml @@ -3,7 +3,6 @@ info: title: Create Entity description: Entity that will be created to test catalog create entity x-cortex-tag: create-entity - x-cortex-type: component + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test diff 
--git a/data/run-time/custom-data-bulk.json b/data/run-time/custom-data-bulk.json index 15538fc..a50d6f4 100644 --- a/data/run-time/custom-data-bulk.json +++ b/data/run-time/custom-data-bulk.json @@ -1,6 +1,6 @@ { "values": { - "backend-worker": [ + "test-service-caller": [ { "key": "bulk-key-1", "value": "value-1" @@ -10,7 +10,7 @@ "value": "value-2" } ], - "ach-payments-nacha": [ + "test-service-callee": [ { "key": "bulk-key-3", "value": "value-3" diff --git a/data/run-time/delete-entity.yaml b/data/run-time/delete-entity.yaml index 0f63e13..088138c 100644 --- a/data/run-time/delete-entity.yaml +++ b/data/run-time/delete-entity.yaml @@ -3,7 +3,6 @@ info: title: Delete Entity description: Entity that will be created and then deleted to test catalog delete entity x-cortex-tag: delete-entity - x-cortex-type: component + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test diff --git a/data/run-time/dependencies-bulk.json b/data/run-time/dependencies-bulk.json index f2c6fe4..ccde592 100644 --- a/data/run-time/dependencies-bulk.json +++ b/data/run-time/dependencies-bulk.json @@ -1,6 +1,6 @@ { "values": { - "fraud-analyzer": [ + "test-service-caller": [ { "description": "dependency description", "metadata": { @@ -9,7 +9,7 @@ }, "method": "GET", "path": "/api/v1/github/configurations", - "tag": "backend-worker" + "tag": "test-service-callee" } ] } diff --git a/data/run-time/test-domain-child.yaml b/data/run-time/test-domain-child.yaml new file mode 100644 index 0000000..4977f1e --- /dev/null +++ b/data/run-time/test-domain-child.yaml @@ -0,0 +1,9 @@ +openapi: 3.0.0 +info: + title: Test Domain Child + x-cortex-tag: test-domain-child + x-cortex-type: domain + x-cortex-groups: + - cli-test + x-cortex-children: + - tag: test-service diff --git a/data/run-time/test-domain-parent.yaml b/data/run-time/test-domain-parent.yaml new file mode 100644 index 0000000..4fdd0f2 --- /dev/null +++ b/data/run-time/test-domain-parent.yaml @@ 
-0,0 +1,9 @@ +openapi: 3.0.0 +info: + title: Test Domain Parent + x-cortex-tag: test-domain-parent + x-cortex-type: domain + x-cortex-groups: + - cli-test + x-cortex-children: + - tag: test-domain-child diff --git a/data/catalog/backend-worker.yaml b/data/run-time/test-service-callee.yaml similarity index 99% rename from data/catalog/backend-worker.yaml rename to data/run-time/test-service-callee.yaml index 120eacc..1481f04 100644 --- a/data/catalog/backend-worker.yaml +++ b/data/run-time/test-service-callee.yaml @@ -1,27 +1,10 @@ -openapi: 3.0.0 +openapi: 3.0.1 info: - description: "" - title: Backend Worker - version: 1.0.0 - x-cortex-link: - - name: Error Logs (backend-worker) - type: logs - url: https://cortex.io - - name: Grafana Dashboard - prod - type: dashboard - url: https://cortex.io - - name: Prometheus Dashboard - prod - type: dashboard - url: https://cortex.io - x-cortex-tag: backend-worker - x-cortex-type: component - x-cortex-definition: {} + title: Test Service Callee + x-cortex-tag: test-service-callee + x-cortex-type: service x-cortex-groups: - - public-api-test - - include-metadata-test - - include-links-test - x-cortex-custom-metadata: - cicd: circle-ci + - cli-api-test servers: - url: https://api.getcortexapp.com description: Cortex Cloud API host diff --git a/data/run-time/test-service-caller.yaml b/data/run-time/test-service-caller.yaml new file mode 100644 index 0000000..2b27f1c --- /dev/null +++ b/data/run-time/test-service-caller.yaml @@ -0,0 +1,7 @@ +openapi: 3.0.1 +info: + title: Test Service Caller + x-cortex-tag: test-service-caller + x-cortex-type: service + x-cortex-groups: + - cli-test diff --git a/data/run-time/test-service-group-1.yaml b/data/run-time/test-service-group-1.yaml new file mode 100644 index 0000000..084568a --- /dev/null +++ b/data/run-time/test-service-group-1.yaml @@ -0,0 +1,8 @@ +openapi: 3.0.1 +info: + title: Test Service Group 1 + x-cortex-tag: test-service-group-1 + x-cortex-type: service + x-cortex-groups: + - 
cli-test + - cli-test-group-1 diff --git a/data/run-time/test-service-group-2.yaml b/data/run-time/test-service-group-2.yaml new file mode 100644 index 0000000..7649fa9 --- /dev/null +++ b/data/run-time/test-service-group-2.yaml @@ -0,0 +1,8 @@ +openapi: 3.0.1 +info: + title: Test Service Group 2 + x-cortex-tag: test-service-group-2 + x-cortex-type: service + x-cortex-groups: + - cli-test + - cli-test-group-2 diff --git a/data/run-time/test-service-links.yaml b/data/run-time/test-service-links.yaml new file mode 100644 index 0000000..73d84af --- /dev/null +++ b/data/run-time/test-service-links.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Service Links + x-cortex-tag: test-service-links + x-cortex-type: service + x-cortex-link: + - url: https://cortex.io + name: Cortex + type: Documentation + x-cortex-groups: + - cli-test + - include-links-test diff --git a/data/run-time/test-service-metadata.yaml b/data/run-time/test-service-metadata.yaml new file mode 100644 index 0000000..df0f9b0 --- /dev/null +++ b/data/run-time/test-service-metadata.yaml @@ -0,0 +1,10 @@ +openapi: 3.0.1 +info: + title: Test Service Metadata + x-cortex-tag: test-service-metadata + x-cortex-type: service + x-cortex-groups: + - cli-test + - include-metadata-test + x-cortex-custom-metadata: + foo: bar diff --git a/data/run-time/test-service-test-team-1.yaml b/data/run-time/test-service-test-team-1.yaml new file mode 100644 index 0000000..e33dfb4 --- /dev/null +++ b/data/run-time/test-service-test-team-1.yaml @@ -0,0 +1,11 @@ +openapi: 3.0.1 +info: + title: Test Service Team 1 + x-cortex-tag: test-service-team-1 + x-cortex-type: service + x-cortex-owners: + - name: test-team-1 + type: GROUP + provider: CORTEX + x-cortex-groups: + - cli-test diff --git a/data/run-time/test-service-test-team-2.yaml b/data/run-time/test-service-test-team-2.yaml new file mode 100644 index 0000000..29cf759 --- /dev/null +++ b/data/run-time/test-service-test-team-2.yaml @@ -0,0 +1,11 @@ +openapi: 3.0.1 
+info: + title: Test Service Test Team 2 + x-cortex-tag: test-service-test-team-2 + x-cortex-type: service + x-cortex-owners: + - name: test-team-2 + type: GROUP + provider: CORTEX + x-cortex-groups: + - cli-test diff --git a/data/run-time/test-service.yaml b/data/run-time/test-service.yaml new file mode 100644 index 0000000..f261543 --- /dev/null +++ b/data/run-time/test-service.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Service + x-cortex-tag: test-service + x-cortex-git: + github: + repository: my-org/my-repo + x-cortex-type: service + x-cortex-groups: + - cli-test + x-cortex-custom-metadata: + cicd: circle-ci diff --git a/data/run-time/test-team-1.yaml b/data/run-time/test-team-1.yaml new file mode 100644 index 0000000..aa4fecd --- /dev/null +++ b/data/run-time/test-team-1.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Team 1 + x-cortex-tag: test-team-1 + x-cortex-type: team + x-cortex-team: + members: + - email: bubbles.harnis@trailer-park.io + name: Bubbles Harnis + notificationsEnabled: false + x-cortex-groups: + - cli-test diff --git a/data/run-time/test-team-2.yaml b/data/run-time/test-team-2.yaml new file mode 100644 index 0000000..acacce8 --- /dev/null +++ b/data/run-time/test-team-2.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Team 2 + x-cortex-tag: test-team-2 + x-cortex-type: team + x-cortex-team: + members: + - email: jim.lahey@trailer-park.io + name: Jim Lahey + notificationsEnabled: false + x-cortex-groups: + - cli-test diff --git a/data/run-time/test-team-child.yaml b/data/run-time/test-team-child.yaml new file mode 100644 index 0000000..78d9727 --- /dev/null +++ b/data/run-time/test-team-child.yaml @@ -0,0 +1,12 @@ +openapi: 3.0.1 +info: + title: Test Team Child + x-cortex-tag: test-team-child + x-cortex-type: team + x-cortex-team: + members: + - email: bubbles.harnis@trailer-park.io + name: Bubbles Harnis + notificationsEnabled: false + x-cortex-groups: + - cli-test diff --git 
a/data/run-time/test-team-parent.yaml b/data/run-time/test-team-parent.yaml new file mode 100644 index 0000000..15505df --- /dev/null +++ b/data/run-time/test-team-parent.yaml @@ -0,0 +1,14 @@ +openapi: 3.0.1 +info: + title: Test Team Parent + x-cortex-tag: test-team-parent + x-cortex-type: team + x-cortex-team: + members: + - email: richard.lafleur@trailer-park.io + name: Richard LaFleur + notificationsEnabled: false + x-cortex-children: + - tag: test-team-child + x-cortex-groups: + - cli-test diff --git a/data/run-time/unarchive-entity.yaml b/data/run-time/unarchive-entity.yaml index 135a4fd..156e633 100644 --- a/data/run-time/unarchive-entity.yaml +++ b/data/run-time/unarchive-entity.yaml @@ -3,7 +3,6 @@ info: title: Unarchive Entity description: Entity that will be created, archived and then un-archived. x-cortex-tag: unarchive-entity - x-cortex-type: component + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test diff --git a/data/run-time/update-entity-type-empty-schema.json b/data/run-time/update-entity-type-empty-schema.json index a18d0ae..c2f8ad1 100644 --- a/data/run-time/update-entity-type-empty-schema.json +++ b/data/run-time/update-entity-type-empty-schema.json @@ -1,5 +1,5 @@ { - "description": "This is a test resource definition.", - "name": "Public API Type With Empty Schema -- Update", + "description": "This is a test entity type definition.", + "name": "CLI Test With Empty Schema -- Update", "schema": {} } diff --git a/data/scorecards/dumb.yaml b/data/scorecards/dumb.yaml deleted file mode 100644 index f9132f8..0000000 --- a/data/scorecards/dumb.yaml +++ /dev/null @@ -1,28 +0,0 @@ -tag: dumb-test -name: Dumb test -description: test -draft: false -notifications: - enabled: true -ladder: - name: Default Ladder - levels: - - name: Bronze - rank: 1 - color: '#cd7f32' - - name: Silver - rank: 2 - color: '#c0c0c0' - - name: Gold - rank: 3 - color: '#ffd700' -rules: -- title: Has link to logs - description: 
test - expression: links("logs").length > 0 - weight: 1 - level: Gold - failureMessage: "Blah, blah, blah" -filter: - query: hasGroup("public-api-test") and entity.type() == "component" - category: RESOURCE diff --git a/data/scorecards/production-readiness.yaml b/data/scorecards/production-readiness.yaml deleted file mode 100644 index d30a437..0000000 --- a/data/scorecards/production-readiness.yaml +++ /dev/null @@ -1,28 +0,0 @@ -tag: public-api-test-production-readiness -name: Public API Test Production Readiness -description: test -draft: false -notifications: - enabled: true -ladder: - name: Default Ladder - levels: - - name: Bronze - rank: 1 - color: '#cd7f32' - - name: Silver - rank: 2 - color: '#c0c0c0' - - name: Gold - rank: 3 - color: '#ffd700' -rules: -- title: Has link to logs - description: test - expression: links("logs").length > 0 - weight: 1 - level: Gold - failureMessage: "Blah, blah, blah" -filter: - query: hasGroup("public-api-test") and entity.type() == "component" - category: RESOURCE diff --git a/pyproject.toml b/pyproject.toml index 594e942..af62d20 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,8 +24,7 @@ typer = "^0.12.5" click = "<8.2" [tool.poetry.scripts] -cortex = "cortexapps_cli.cortex:cli" -cortex2 = "cortexapps_cli.cli:app" +cortex = "cortexapps_cli.cli:app" [tool.poetry.group.test.dependencies] pytest = "^8.2.2" diff --git a/tests.orig/custom-events-configure.json b/tests.orig/custom-events-configure.json deleted file mode 100644 index c631b26..0000000 --- a/tests.orig/custom-events-configure.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "customData": { - "test3": 678, - "test4": "someVal3" - }, - "description": "Config event", - "timestamp": "2023-10-08T13:27:51.226Z", - "title": "config event", - "type": "CONFIG_SERVICE" -} diff --git a/tests.orig/custom-events.json b/tests.orig/custom-events.json deleted file mode 100644 index dfd1948..0000000 --- a/tests.orig/custom-events.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - 
"customData": { - "test1": 345, - "test2": "someVal2" - }, - "description": "Validate event", - "timestamp": "2023-10-10T13:27:51.226Z", - "title": "validate event", - "type": "VALIDATE_SERVICE" -} diff --git a/tests.orig/data/catalog/cli-test-service-with-groups.yaml b/tests.orig/data/catalog/cli-test-service-with-groups.yaml deleted file mode 100644 index 1d31ec3..0000000 --- a/tests.orig/data/catalog/cli-test-service-with-groups.yaml +++ /dev/null @@ -1,13 +0,0 @@ -openapi: 3.0.1 -info: - title: CLI Test Service With Groups - x-cortex-tag: cli-test-service-with-groups - x-cortex-type: service - x-cortex-groups: - - corona-spokesperson - x-cortex-owners: - - name: test-team-1 - type: GROUP - provider: CORTEX - x-cortex-custom-metadata: - foo: bar diff --git a/tests.orig/data/catalog/cli-test-service.yaml b/tests.orig/data/catalog/cli-test-service.yaml deleted file mode 100644 index 63d87a3..0000000 --- a/tests.orig/data/catalog/cli-test-service.yaml +++ /dev/null @@ -1,71 +0,0 @@ -openapi: 3.0.1 -info: - title: CLI Test Service - x-cortex-git: - github: - repository: snoop-dogg/woof - x-cortex-tag: cli-test-service - x-cortex-type: service - x-cortex-owners: - - type: EMAIL - email: snoop.dogg@cortex.io - x-cortex-custom-metadata: - foo: bar - testField: abc -paths: - /: - get: - summary: List API versions - operationId: listVersionsv2 - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z - put: - summary: Set version - operationId: SetVersions - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z - /2.0/users/{username}: - get: - summary: List API versions - operationId: listVersionsv2 - 
responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z diff --git a/tests.orig/data/catalog/test-service.yaml b/tests.orig/data/catalog/test-service.yaml deleted file mode 100644 index 16d10df..0000000 --- a/tests.orig/data/catalog/test-service.yaml +++ /dev/null @@ -1,83 +0,0 @@ -openapi: 3.0.1 -info: - title: Test Service - x-cortex-git: - github: - repository: snoop-dogg/woof - x-cortex-tag: test-service - x-cortex-type: service - x-cortex-groups: - - corona-spokesperson - x-cortex-owners: - - type: EMAIL - email: snoop.dogg@cortex.io - - name: test-team-1 - type: GROUP - provider: CORTEX - x-cortex-custom-metadata: - foo: bar - testField: 123 - x-cortex-link: - - name: Cortex Homepage - type: DOCUMENTATION - url: https://cortex.io - - name: Cortex Documentation - type: DOCUMENTATION - url: https://docs.cortex.io -paths: - /: - get: - summary: List API versions - operationId: listVersionsv2 - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z - put: - summary: Set version - operationId: SetVersions - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z - /2.0/users/{username}: - get: - summary: List API versions - operationId: listVersionsv2 - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - id: v2.0 - links: - - rel: self - href: http://127.0.0.1:8774/v2/ - status: CURRENT - updated: 2011-01-21T11:33:21Z diff --git 
a/tests.orig/data/catalog/test-team-1.yaml b/tests.orig/data/catalog/test-team-1.yaml deleted file mode 100644 index 99b6d47..0000000 --- a/tests.orig/data/catalog/test-team-1.yaml +++ /dev/null @@ -1,10 +0,0 @@ -openapi: 3.0.1 -info: - title: Test Team 1 - x-cortex-tag: test-team-1 - x-cortex-team: - members: - - name: Jeff Schnitter - email: jeff.schnitter@cortex.io - notificationsEnabled: true - x-cortex-type: team diff --git a/tests.orig/data/catalog/test-team-2.yaml b/tests.orig/data/catalog/test-team-2.yaml deleted file mode 100644 index 2fe824e..0000000 --- a/tests.orig/data/catalog/test-team-2.yaml +++ /dev/null @@ -1,10 +0,0 @@ -openapi: 3.0.1 -info: - title: Test Team 2 - x-cortex-tag: test-team-2 - x-cortex-team: - members: - - name: Joe Montana - email: joe.montana@cortex.io - notificationsEnabled: true - x-cortex-type: team diff --git a/tests.orig/data/catalog/test_dependencies_dependency_service.yaml b/tests.orig/data/catalog/test_dependencies_dependency_service.yaml deleted file mode 100644 index 27780ee..0000000 --- a/tests.orig/data/catalog/test_dependencies_dependency_service.yaml +++ /dev/null @@ -1,13 +0,0 @@ -openapi: 3.0.1 -info: - title: Dependency Service - x-cortex-tag: dependency-service - x-cortex-type: service - x-cortex-owners: - - type: EMAIL - email: richard.lafleur@cortex.io - x-cortex-dependency: - - tag: cli-test-service - path: "/2.0/users/{username}" - method: GET - description: ensure user has payment information configured diff --git a/tests.orig/data/json/resource-definitions.json b/tests.orig/data/json/resource-definitions.json deleted file mode 100644 index d86ddeb..0000000 --- a/tests.orig/data/json/resource-definitions.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "definitions": [ - { - "type": "test-resource-definition", - "source": "CUSTOM", - "name": "Test Resource Defintion -- Update", - "description": "This is a test resource definition.", - "schema": { - "type": "object" - } - } - ] -} diff --git 
a/tests.orig/data/json/resource_definitions.json b/tests.orig/data/json/resource_definitions.json deleted file mode 100644 index e69de29..0000000 diff --git a/tests.orig/data/json/test-resource-definition.json b/tests.orig/data/json/test-resource-definition.json deleted file mode 100644 index 6ca53c7..0000000 --- a/tests.orig/data/json/test-resource-definition.json +++ /dev/null @@ -1 +0,0 @@ -{"type":"test-resource-definition","source":"CUSTOM","name":"Test Resource Defintion -- Update","description":"This is a test resource definition.","schema":{"type":"object"}} diff --git a/tests.orig/data/resource-definitions/test-resource-definition.json b/tests.orig/data/resource-definitions/test-resource-definition.json deleted file mode 100644 index d956988..0000000 --- a/tests.orig/data/resource-definitions/test-resource-definition.json +++ /dev/null @@ -1 +0,0 @@ -{"type":"test-resource-definition","source":"CUSTOM","name":"Test Resource Definition","description":"This is a test resource definition.","schema":{"type":"object"}} diff --git a/tests.orig/data/scorecards/test-scorecard-draft.yaml b/tests.orig/data/scorecards/test-scorecard-draft.yaml deleted file mode 100644 index c16e9c1..0000000 --- a/tests.orig/data/scorecards/test-scorecard-draft.yaml +++ /dev/null @@ -1,19 +0,0 @@ -tag: test-scorecard-draft -name: Test Scorecard Draft -description: Used to test Cortex CLI -draft: true -ladder: - name: Default Ladder - levels: - - name: You Made It - rank: 1 - description: "\"If you ain't first, you're last. 
-- Ricky Bobby\" -- Scott Mullin" - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("testField") != null - weight: 1 - level: You Made It -filter: - query: entity_descriptor.info.`x-cortex-tag` = "cli-test-service" - category: SERVICE diff --git a/tests.orig/data/scorecards/test-scorecard.yaml b/tests.orig/data/scorecards/test-scorecard.yaml deleted file mode 100644 index b2c11bc..0000000 --- a/tests.orig/data/scorecards/test-scorecard.yaml +++ /dev/null @@ -1,19 +0,0 @@ -tag: test-scorecard -name: Test Scorecard -description: Used to test Cortex CLI -draft: false -ladder: - name: Default Ladder - levels: - - name: You Made It - rank: 1 - description: "\"If you ain't first, you're last. -- Ricky Bobby\" -- Scott Mullin" - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("testField") != null - weight: 1 - level: You Made It -filter: - query: entity_descriptor.info.`x-cortex-tag` = "cli-test-service" - category: SERVICE diff --git a/tests.orig/dependency-update.json b/tests.orig/dependency-update.json deleted file mode 100644 index f4ff253..0000000 --- a/tests.orig/dependency-update.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "description": "This is the updated description of the dependency", - "metadata": { - "newField": "newField data", - "newField1": "newField1 data" - } -} diff --git a/tests.orig/test-custom-data-array.json b/tests.orig/test-custom-data-array.json deleted file mode 100644 index b69dbc1..0000000 --- a/tests.orig/test-custom-data-array.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "values": { - "test-service": [ - { - "key": "checklist", - "value": { - "ada": "yes", - "pii": "n/a", - "itar": "no", - "date": "Fri Aug 25 2023", - "signed_by": "Jeff Schnitter" - } - } - ] - } -} diff --git a/tests.orig/test-custom-data-bulk.json b/tests.orig/test-custom-data-bulk.json deleted file mode 100644 index be5cb7f..0000000 --- a/tests.orig/test-custom-data-bulk.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "values": { - 
"test-service": [ - { - "key": "bulk-key-1", - "value": "key-1" - }, - { - "key": "bulk-key-2", - "value": "key-2" - } - ], - "test-service": [ - { - "key": "bulk-key-3", - "value": "key-3" - }, - { - "key": "bulk-key-4", - "value": "key-4" - } - ] - } -} diff --git a/tests.orig/test-custom-data.json b/tests.orig/test-custom-data.json deleted file mode 100644 index d2d5bdb..0000000 --- a/tests.orig/test-custom-data.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "description": "custom data field description", - "key": "anotherTestField", - "value": "123" -} diff --git a/tests.orig/test-resource-definition-update.json b/tests.orig/test-resource-definition-update.json deleted file mode 100644 index d5438ca..0000000 --- a/tests.orig/test-resource-definition-update.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "description": "This is a test resource definition.", - "name": "Test Resource Defintion -- Update", - "schema": {} -} diff --git a/tests.orig/test-resource-definition.json b/tests.orig/test-resource-definition.json deleted file mode 100644 index 360c21d..0000000 --- a/tests.orig/test-resource-definition.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "description": "This is a test resource definition.", - "name": "Test Resource Defintion", - "schema": {}, - "type": "test-resource-definition" -} diff --git a/tests.orig/test_audit_logs.py b/tests.orig/test_audit_logs.py deleted file mode 100644 index 1acb14e..0000000 --- a/tests.orig/test_audit_logs.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -Tests for the audit-logs commands. 
-""" -from cortexapps_cli.cortex import cli -from datetime import datetime, timedelta, timezone -import json -import sys -import pytest - -def test_audit_logs_get(): - cli(["audit-logs", "get"]) - -def test_audit_logs_page_size(capsys): - cli(["audit-logs", "get", "-p", "1", "-z", "5"]) - out, err = capsys.readouterr() - out = json.loads(out) - assert len(out['logs']) == 5, "Changing page size should return requested amount of entries" - -def test_audit_logs_with_start_and_end(capsys): - now = datetime.now() - yesterday = now - timedelta(days=1) - cli(["audit-logs", "get", "-e", now.isoformat(), "-s", yesterday.isoformat()]) - -def test_audit_logs_with_start(): - now = datetime.now() - yesterday = now - timedelta(days=1) - cli(["audit-logs", "get", "-s", yesterday.isoformat()]) - -def test_audit_logs_with_end(): - now = datetime.now() - yesterday = now - timedelta(days=1) - cli(["audit-logs", "get", "-e", yesterday.isoformat()]) diff --git a/tests.orig/test_backup.py b/tests.orig/test_backup.py deleted file mode 100644 index 37da787..0000000 --- a/tests.orig/test_backup.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -Tests for backup commands. 
-""" -from cortexapps_cli.cortex import cli - -import pytest -import sys - -def test_import(capsys): - cli(["backup", "import", "-d", "tests/test_backup_export"]) - diff --git a/tests.orig/test_backup_export/catalog/test-service-import-2.yaml b/tests.orig/test_backup_export/catalog/test-service-import-2.yaml deleted file mode 100644 index 3835c8d..0000000 --- a/tests.orig/test_backup_export/catalog/test-service-import-2.yaml +++ /dev/null @@ -1,5 +0,0 @@ -openapi: 3.0.1 -info: - title: Test Service Import 2 - x-cortex-tag: test-service-import-2 - x-cortex-type: service diff --git a/tests.orig/test_backup_export/catalog/test-service-import.yaml b/tests.orig/test_backup_export/catalog/test-service-import.yaml deleted file mode 100644 index 06b4494..0000000 --- a/tests.orig/test_backup_export/catalog/test-service-import.yaml +++ /dev/null @@ -1,5 +0,0 @@ -openapi: 3.0.1 -info: - title: CLI Test Service Import - x-cortex-tag: cli-test-service-import - x-cortex-type: service diff --git a/tests.orig/test_backup_export/json/ip-allowlist.json b/tests.orig/test_backup_export/json/ip-allowlist.json deleted file mode 100644 index 6a480c6..0000000 --- a/tests.orig/test_backup_export/json/ip-allowlist.json +++ /dev/null @@ -1 +0,0 @@ -{"entries":[]} diff --git a/tests.orig/test_backup_export/resource-definitions/test-resource-definition.json b/tests.orig/test_backup_export/resource-definitions/test-resource-definition.json deleted file mode 100644 index d956988..0000000 --- a/tests.orig/test_backup_export/resource-definitions/test-resource-definition.json +++ /dev/null @@ -1 +0,0 @@ -{"type":"test-resource-definition","source":"CUSTOM","name":"Test Resource Definition","description":"This is a test resource definition.","schema":{"type":"object"}} diff --git a/tests.orig/test_backup_export/scorecards/test-scorecard.yaml b/tests.orig/test_backup_export/scorecards/test-scorecard.yaml deleted file mode 100644 index 6db0c32..0000000 --- 
a/tests.orig/test_backup_export/scorecards/test-scorecard.yaml +++ /dev/null @@ -1,19 +0,0 @@ -tag: test-scorecard-import -name: Test Scorecard Import -description: Used to test Cortex CLI -draft: false -ladder: - name: Default Ladder - levels: - - name: You Made It - rank: 1 - description: "\"If you ain't first, you're last. -- Ricky Bobby\" -- Scott Mullin" - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("testField") != null - weight: 1 - level: You Made It -filter: - query: entity_descriptor.info.`x-cortex-tag` = "test-service" - category: SERVICE diff --git a/tests.orig/test_backup_export/teams/test-team-3.json b/tests.orig/test_backup_export/teams/test-team-3.json deleted file mode 100644 index 7bbff10..0000000 --- a/tests.orig/test_backup_export/teams/test-team-3.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "teamTag": "test-team-3", - "metadata": { - "name": "Test Team 3", - "description": null, - "summary": null - }, - "links": [], - "slackChannels": [], - "additionalMembers": [], - "isArchived": false, - "cortexTeam": { - "members": [ - { - "email": "jeff.schnitter@cortex.io", - "name": "Jeff Schnitter", - "description": "", - "role": null - } - ] - }, - "type": "CORTEX" -} diff --git a/tests.orig/test_catalog-invalid-service.yaml b/tests.orig/test_catalog-invalid-service.yaml deleted file mode 100644 index ab87147..0000000 --- a/tests.orig/test_catalog-invalid-service.yaml +++ /dev/null @@ -1,8 +0,0 @@ -openapi: 3.0.1 -info: - title: Invalid Service to test dryrun - x-cortex-bag: invalid-service - x-cortex-type: service - x-cortex-owners: - - type: EMAIL - email: snoop.dogg@cortex.io diff --git a/tests.orig/test_catalog.py b/tests.orig/test_catalog.py deleted file mode 100644 index 7f781ee..0000000 --- a/tests.orig/test_catalog.py +++ /dev/null @@ -1,107 +0,0 @@ -""" -Tests for the catalog methods. 
-""" - -from cortexapps_cli.cortex import cli -import json -import pytest -import sys - -def test_catalog_create_service(capsys): - cli(["catalog", "create", "-f", "tests/test_catalog_create_service.yaml"]) - -def test_retrieve_service(capsys): - cli(["catalog", "descriptor", "-y", "-t", "cli-test-service"]) - -def test_dryrun(capsys): - with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "create", "-d", "-f", "tests/test_catalog-invalid-service.yaml"]) - out, err = capsys.readouterr() - assert json.loads(out)['type'] == "BAD_REQUEST" - -def test_details(capsys): - cli(["catalog", "details", "-t", "cli-test-service"]) - -def test_details_hierarchy_fields(capsys): - cli(["catalog", "details", "-i", "groups", "-t", "cli-test-service"]) - -def test_list(capsys): - cli(["catalog", "list"]) - -def test_list_page(capsys): - cli(["catalog", "list", "-p", "0"]) - -def test_list_page_size(capsys): - cli(["catalog", "list", "-z", "100"]) - -def test_list_include_hierarchy_fields(capsys): - cli(["catalog", "list", "-i", "groups", "-z", "50"]) - -def test_list_page_and_page_size(capsys): - cli(["catalog", "list", "-p", "0", "-z", "2"]) - out, err = capsys.readouterr() - assert len(json.loads(out)['entities']) == 2 - -def test_list_with_parms(capsys): - cli(["catalog", "list", "-g", "corona-spokesperson", "-d", "1", "-t", "service", "-a", "-m" ]) - out, err = capsys.readouterr() - out = json.loads(out) - assert any(service['tag'] == 'cli-test-service-with-groups' for service in out['entities']) - assert not(out['entities'][0]['metadata'][0]["key"] is None), "Custom metadata should have been in result" - -# Archiving a service can impact it from being seen by other operations. 
Should probably be done with a separate -# service -def test_archive(): - cli(["catalog", "archive", "-t", "cli-test-service"]) - cli(["catalog", "unarchive", "-t", "cli-test-service"]) - -def test_list_by_team(capsys): - cli(["catalog", "list", "-o", "test-team-1" ]) - out, err = capsys.readouterr() - out = json.loads(out) - assert any(service['tag'] == 'cli-test-service-with-groups' for service in out['entities']) - -def test_list_with_owners(capsys): - cli(["catalog", "list", "-l", "-io", "-g", "corona-spokesperson"]) - out, err = capsys.readouterr() - out = json.loads(out) - - found_service = False - for entity in out['entities']: - if entity['tag'] == "test-service": - assert len(entity['links']) > 0 - assert len(entity['owners']) > 0 - found_service = True - - assert found_service - -def test_list_descriptors(capsys, tmp_path): - cli(["catalog", "list-descriptors", "-z", "1", "-p", "0", "-y"]) - out, err = capsys.readouterr() - out = json.loads(out) - - f = tmp_path / "descriptor.yaml" - f.write_text(out["descriptors"][0]) - - # Should be able to have a dryrun validate the yaml - cli(["catalog", "create", "-d", "-f", str(f)]) - -# Since gitops not set up for this service, it should return "Not Found". -# Kind of a cheap way out for this test, but it does validate the metod -# was accepted and returnd a value. -def test_gitops_logs(capsys): - # Must be raised as exception, because of the expected 404 status code. - with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "gitops-logs", "-t", "test-service"]) - out, err = capsys.readouterr() - - assert out == "Not Found" - assert excinfo.value.code == 404 - -# Not checking any output because we cannot guarantee scorecards have -# been evaluated. -# -# Can change this in the future when there is a way to ensure that a -# scorecard has been evaluated. 
-def test_scorecard_scores(capsys): - cli(["catalog", "scorecard-scores", "-t", "test-service"]) diff --git a/tests.orig/test_catalog_create_service.yaml b/tests.orig/test_catalog_create_service.yaml deleted file mode 100644 index bd3d1c5..0000000 --- a/tests.orig/test_catalog_create_service.yaml +++ /dev/null @@ -1,5 +0,0 @@ -openapi: 3.0.1 -info: - title: CLI Test Create Service - x-cortex-tag: cli-test-create - x-cortex-type: service diff --git a/tests.orig/test_command_line_opts.py b/tests.orig/test_command_line_opts.py deleted file mode 100644 index 78d2e6b..0000000 --- a/tests.orig/test_command_line_opts.py +++ /dev/null @@ -1,33 +0,0 @@ -""" -Tests for version commands. -""" -from cortexapps_cli.cortex import cli -import pytest - -def test_version(): - with pytest.raises(SystemExit) as excinfo: - cli(["-v"]) - -def test_help(): - with pytest.raises(SystemExit) as excinfo: - cli(["-h"]) - -def test_no_parms(): - with pytest.raises(SystemExit) as excinfo: - cli([]) - -def test_integrations_no_parms(): - with pytest.raises(SystemExit) as excinfo: - cli(["integrations"]) - -def test_integrations_help(): - with pytest.raises(SystemExit) as excinfo: - cli(["integrations", "-h"]) - -def test_integrations_command(): - with pytest.raises(SystemExit) as excinfo: - cli(["integrations", "aws"]) - -def test_command_no_options(): - with pytest.raises(SystemExit) as excinfo: - cli(["catalog"]) diff --git a/tests.orig/test_config_file.py b/tests.orig/test_config_file.py deleted file mode 100644 index 64bcd31..0000000 --- a/tests.orig/test_config_file.py +++ /dev/null @@ -1,98 +0,0 @@ -""" -Tests for the cortex CLI config file -""" - -# These tests are all marked to run in serial order because they make modifications to the -# cortex config file and/or CORTEX_API_KEY value and would potentially impact other tests -# that are running in parallel (with poetry run pytest -n auto), so they are run separately. 
- -# Additionally, order is VERY IMPORTANT in this file because of the way CORTEX_API key is -# deleted, set to invalid values, etc. Moving test order could impact the overall success -# of pytest. Tread carefully here. -from cortexapps_cli.cortex import cli - -import io -import os -import pytest -import sys -from string import Template - -# Requires user input, so use monkeypatch to set it. -@pytest.fixture(scope="session") -def delete_cortex_api_key(): - if "CORTEX_API_KEY" in os.environ: - del os.environ['CORTEX_API_KEY'] - -@pytest.mark.serial -def test_config_file_api_key_quotes(tmp_path): - cortex_api_key = os.getenv('CORTEX_API_KEY') - f = tmp_path / "cortex_config_api_key_quotes" - template = Template(""" - [default] - api_key = "${cortex_api_key}" - """) - content = template.substitute(cortex_api_key=cortex_api_key) - print(content) - f.write_text(content) - cli(["-c", str(f), "teams", "list"]) - -@pytest.mark.serial -def test_environment_variables(capsys): - cli(["teams", "list"]) - out, err = capsys.readouterr() - #print(out) - print("ERR = " + err) - assert err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY", "Warning should be displayed by default" - - cli(["-q", "teams", "list"]) - out, err = capsys.readouterr() - assert not(err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY"), "Warning should be displayed with -q option" - -@pytest.mark.serial -def test_config_file_create(monkeypatch, tmp_path, delete_cortex_api_key): - with pytest.raises(SystemExit) as excinfo: - monkeypatch.setattr('sys.stdin', io.StringIO('Y')) - f = tmp_path / "test-config.txt" - cli(["-c", str(f), "catalog", "list"]) - -@pytest.mark.serial -def test_config_file_new(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config" - content = """ - [default] - api_key = REPLACE_WITH_YOUR_CORTEX_API_KEY - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "teams", "list"]) - 
out, err = capsys.readouterr() - -@pytest.mark.serial -def test_export(capsys, delete_cortex_api_key): - cli(["-t", "rich-sandbox", "backup", "export"]) - out, err = capsys.readouterr() - last_line = out.strip().split("\n")[-1] - sys.stdout.write(out + "\n\n") - sys.stdout.write(last_line + "\n\n") - assert "rich-sandbox" in out - -@pytest.mark.serial -def test_config_file_bad_api_key(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config_bad_api_key" - content = """ - [default] - api_key = invalidApiKey - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "teams", "list"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" - -@pytest.mark.serial -def test_environment_variable_invalid_key(capsys): - with pytest.raises(SystemExit) as excinfo: - os.environ["CORTEX_API_KEY"] = "invalidKey" - cli(["teams", "list"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" diff --git a/tests.orig/test_custom_data.py b/tests.orig/test_custom_data.py deleted file mode 100644 index a36a025..0000000 --- a/tests.orig/test_custom_data.py +++ /dev/null @@ -1,18 +0,0 @@ -""" -Tests for custom-data commands. 
-""" -from cortexapps_cli.cortex import cli -import json - -def test_custom_data(): - cli(["custom-data", "add", "-t", "test-service", "-f", "tests/test-custom-data.json"]) - cli(["custom-data", "list", "-t", "test-service"]) - -def test_custom_data_bulk(): - cli(["custom-data", "bulk", "-f", "tests/test-custom-data-bulk.json"]) - -def test_custom_data_bulk_array(): - cli(["custom-data", "bulk", "-f", "tests/test-custom-data-array.json"]) - -def test_custom_data_get(): - cli(["custom-data", "get", "-t", "test-service", "-k", "foo"]) diff --git a/tests.orig/test_custom_events.py b/tests.orig/test_custom_events.py deleted file mode 100644 index f81c817..0000000 --- a/tests.orig/test_custom_events.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Tests for custom-events commands. -""" -from cortexapps_cli.cortex import cli -import json -import time - -def test_custom_events_create(capsys): - cli(["custom-events", "create", "-t", "test-service", "-f", "tests/custom-events.json"]) - cli(["custom-events", "list", "-t", "test-service"]) - cli(["custom-events", "list", "-t", "test-service", "-y", "VALIDATE_SERVICE"]) - cli(["custom-events", "list", "-t", "test-service", "-y", "VALIDATE_SERVICE", "-i", "2023-10-10T13:27:51.226"]) - - -def test_custom_event_uuid(capsys): - cli(["custom-events", "create", "-t", "test-service", "-f", "tests/custom-events-configure.json"]) - out, err = capsys.readouterr() - out = json.loads(out) - uuid = out['uuid'] - cli(["custom-events", "get-by-uuid", "-t", "test-service", "-u", uuid]) - cli(["custom-events", "update-by-uuid", "-t", "test-service", "-u", uuid, "-f", "tests/custom-events.json"]) - cli(["custom-events", "delete-by-uuid", "-t", "test-service", "-u", uuid]) - cli(["custom-events", "delete-all", "-t", "test-service"]) - diff --git a/tests.orig/test_dependencies.json b/tests.orig/test_dependencies.json deleted file mode 100644 index 5cd19e1..0000000 --- a/tests.orig/test_dependencies.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "description": 
"This is a description of the dependency", - "metadata": { - "someField": "someField data", - "someField1": "someField1 data" - } -} diff --git a/tests.orig/test_dependencies.py b/tests.orig/test_dependencies.py deleted file mode 100644 index af727d1..0000000 --- a/tests.orig/test_dependencies.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Tests for teams commands. -""" -from cortexapps_cli.cortex import cli - -def test_dependencies(capsys): - cli(["dependencies", "delete-all", "-r", "dependency-service"]) - cli(["dependencies", "add", "-r", "dependency-service", "-e", - "test-service", "-m", "GET", "-p", "/2.0/users/{username}", "-f", "tests/test_dependencies.json"]) - - cli(["dependencies", "delete", "-r", "dependency-service", "-e", "test-service", "-m", "GET", "-p", "/2.0/users/{username}"]) - - cli(["dependencies", "add-in-bulk", "-f", "tests/test_dependencies_bulk.json"]) - - cli(["dependencies", "get", "-r", "dependency-service", "-e", "test-service", "-m", "GET", "-p", "/2.0/users/{username}"]) - - cli(["dependencies", "get-all", "-r", "dependency-service", "-o"]) - - cli(["dependencies", "update", "-r", "dependency-service", "-e", "test-service", "-m", "GET", "-p", "/2.0/users/{username}", "-f", "tests/test_dependencies_update.json"]) - - cli(["dependencies", "add-in-bulk", "-f", "tests/test_dependencies_bulk.json"]) - cli(["dependencies", "delete-in-bulk", "-f", "tests/test_dependencies_bulk.json"]) - cli(["dependencies", "delete-all", "-r", "dependency-service"]) diff --git a/tests.orig/test_dependencies_bulk.json b/tests.orig/test_dependencies_bulk.json deleted file mode 100644 index 5e1c65e..0000000 --- a/tests.orig/test_dependencies_bulk.json +++ /dev/null @@ -1,16 +0,0 @@ -{ -"values": { - "dependency-service": [ - { - "description": "dependency descr", - "metadata": { - "someField": "someField data", - "someField1": "someField1 data" - }, - "method": "GET", - "path": "/2.0/users/{username}", - "tag": "test-service" - } - ] -} -} diff --git 
a/tests.orig/test_dependencies_update.json b/tests.orig/test_dependencies_update.json deleted file mode 100644 index f4ff253..0000000 --- a/tests.orig/test_dependencies_update.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "description": "This is the updated description of the dependency", - "metadata": { - "newField": "newField data", - "newField1": "newField1 data" - } -} diff --git a/tests.orig/test_deploys.json b/tests.orig/test_deploys.json deleted file mode 100644 index e934c4d..0000000 --- a/tests.orig/test_deploys.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "customData": { - "test-field-1": "value1" - }, - "deployer": { - "email": "test-user@example.com", - "name": "Test Deployer" - }, - "environment": "testEnv", - "sha": "SHA-123456", - "timestamp": "2023-08-16T22:55:38.284Z", - "title": "deploy-001", - "type": "DEPLOY" -} diff --git a/tests.orig/test_deploys.py b/tests.orig/test_deploys.py deleted file mode 100644 index b418eb6..0000000 --- a/tests.orig/test_deploys.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -Tests for deploys commands. -""" -from cortexapps_cli.cortex import cli -import json - -def _add_deploy(): - cli(["deploys", "add", "-t", "cli-test-service", "-f", "tests/test_deploys.json"]) - -def test_deploys(capsys): - # This has to be the first call to the cli because we want to capture the output and capsys - # captures output collectively. 
- cli(["deploys", "add", "-t", "cli-test-service", "-f", "tests/test_deploys_uuid.json"]) - out, err = capsys.readouterr() - out = json.loads(out) - uuid = out['uuid'] - - cli(["-d", "deploys", "update-by-uuid", "-t", "cli-test-service", "-u", uuid, "-f", "tests/test_deploys_update.json"]) - - cli(["deploys", "delete-by-uuid", "-t", "cli-test-service", "-u", uuid]) - - _add_deploy() - - cli(["deploys", "list", "-t", "cli-test-service"]) - - cli(["deploys", "delete", "-t", "cli-test-service", "-s", "SHA-123456"]) - - _add_deploy() - cli(["deploys", "delete-filter", "-y", "DEPLOY"]) - - _add_deploy() - cli(["deploys", "delete-all"]) diff --git a/tests.orig/test_deploys_update.json b/tests.orig/test_deploys_update.json deleted file mode 100644 index 92e8e58..0000000 --- a/tests.orig/test_deploys_update.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "customData": { - "test-field-1": "value1" - }, - "deployer": { - "email": "test-user@example.com", - "name": "Test Deployer" - }, - "environment": "testEnv", - "sha": "SHA-456789", - "timestamp": "2023-11-29T22:55:38.284Z", - "title": "deploy-001", - "type": "DEPLOY" -} diff --git a/tests.orig/test_deploys_uuid.json b/tests.orig/test_deploys_uuid.json deleted file mode 100644 index fa6d6df..0000000 --- a/tests.orig/test_deploys_uuid.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "customData": { - "test-field-2": "value2" - }, - "deployer": { - "email": "test-user@example.com", - "name": "Test Deployer" - }, - "environment": "testEnv", - "sha": "SHA-999999", - "timestamp": "2023-11-29T16:29:38.284Z", - "title": "deploy-002", - "type": "DEPLOY" -} diff --git a/tests.orig/test_discovery_audit.py b/tests.orig/test_discovery_audit.py deleted file mode 100644 index 6c4993e..0000000 --- a/tests.orig/test_discovery_audit.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Tests for the discovery-audit commands. 
-""" -from cortexapps_cli.cortex import cli - -def test_discovery_audit_get(): - cli(["discovery-audit", "get"]) - -def test_discovery_audit_get_include_ignored(): - cli(["discovery-audit", "get", "-i"]) - -def test_discovery_audit_filter_on_source(): - cli(["discovery-audit", "get", "-s", "GITHUB"]) - -def test_discovery_audit_filter_on_type(): - cli(["discovery-audit", "get", "-t", "NEW_REPOSITORY"]) - diff --git a/tests.orig/test_docs.py b/tests.orig/test_docs.py deleted file mode 100644 index f4ff257..0000000 --- a/tests.orig/test_docs.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -Tests for docs commands. -""" -from cortexapps_cli.cortex import cli - -def test_docs(): - cli(["docs", "update", "-t", "cli-test-service", "-f", "tests/test_docs.yaml"]) - - cli(["docs", "get", "-t", "cli-test-service"]) - - cli(["docs", "delete", "-t", "cli-test-service"]) diff --git a/tests.orig/test_docs.yaml b/tests.orig/test_docs.yaml deleted file mode 100644 index 053daca..0000000 --- a/tests.orig/test_docs.yaml +++ /dev/null @@ -1,42 +0,0 @@ -openapi: 3.0.0 -info: - title: Simple API overview - version: 2.0.0 -paths: - /: - get: - operationId: listVersionsv2 - summary: List API versions with 'full' history - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - status: CURRENT - updated: "2011-01-21T11:33:21Z" - id: v2.0 - links: - - href: http://127.0.0.1:8774/v2/ - rel: self - put: - operationId: SetVersions - summary: Set version - responses: - "200": - description: 200 response - content: - application/json: - examples: - foo: - value: - versions: - - status: CURRENT - updated: "2011-01-21T11:33:21Z" - id: v2.0 - links: - - href: http://127.0.0.1:8774/v2/ - rel: self diff --git a/tests.orig/test_gitops_logs.py b/tests.orig/test_gitops_logs.py deleted file mode 100644 index e4d4be0..0000000 --- a/tests.orig/test_gitops_logs.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Tests for the gitops-logs commands. 
-""" -from cortexapps_cli.cortex import cli -from datetime import datetime, timedelta, timezone -import json -import sys -import pytest - -def test_gitops_logs_get(): - cli(["gitops-logs", "get"]) - -def test_gitops_logs_page_size(capsys): - cli(["-d", "gitops-logs", "get", "-p", "1", "-z", "5"]) - out, err = capsys.readouterr() - out = json.loads(out) - assert len(out['logs']) == 5, "Changing page size should return requested amount of entries" diff --git a/tests.orig/test_groups.py b/tests.orig/test_groups.py deleted file mode 100644 index d835584..0000000 --- a/tests.orig/test_groups.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -Tests for groups commands. -""" -from cortexapps_cli.cortex import cli - -def test_groups_add(): - cli(["groups", "add", "-t", "test-service", "-f", "tests/test-groups.json"]) - -def test_groups_get(): - cli(["groups", "get", "-t", "test-service"]) - -def test_groups_delete(): - cli(["groups", "delete", "-t", "test-service", "-f", "tests/test-groups.json"]) - cli(["groups", "get", "-t", "test-service"]) diff --git a/tests.orig/test_integrations_aws.py b/tests.orig/test_integrations_aws.py deleted file mode 100644 index d64d717..0000000 --- a/tests.orig/test_integrations_aws.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -Tests for aws integration commands. -""" -from cortexapps_cli.cortex import cli -import os -import responses - -# Since responses are all mocked and no data validation is done by the CLI -- -# we let the API handle validation -- we don't need valid input files. 
-def _dummy_file(tmp_path): - f = tmp_path / "test_integrations_newrelic_add.json" - f.write_text("foobar") - return f - -@responses.activate -def test_integrations_aws_add(tmp_path): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", json=[{'accountId': 123, 'role:': 'test'}], status=200) - cli(["integrations", "aws", "add", "-a", "123", "-r", "test"]) - -@responses.activate -def test_integrations_aws_delete(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/123456", status=200) - cli(["integrations", "aws", "delete", "-a", "123456"]) - - -@responses.activate -def test_integrations_aws_delete_all(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) - cli(["integrations", "aws", "delete-all"]) - -@responses.activate -def test_integrations_aws_get(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/123456", status=200) - cli(["integrations", "aws", "get", "-a", "123456"]) - -@responses.activate -def test_integrations_aws_get_all(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) - cli(["integrations", "aws", "get-all"]) - -@responses.activate -def test_integrations_aws_update(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) - cli(["integrations", "aws", "update", "-f", str(f)]) - -@responses.activate -def test_integrations_aws_validate(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/validate/123456", status=200) - cli(["integrations", "aws", "validate", "-a", "123456"]) - -@responses.activate -def test_integrations_aws_validate_all(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations/all/validate", status=200) - cli(["integrations", "aws", 
"validate-all"]) diff --git a/tests.orig/test_integrations_aws_config.json b/tests.orig/test_integrations_aws_config.json deleted file mode 100644 index 437cacb..0000000 --- a/tests.orig/test_integrations_aws_config.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "configurations": [ - { - "accountId": "123456", - "role": "my-new-role" - }, - { - "accountId": "284224559761", - "role": "Test-Role-Jeff" - } - ] -} diff --git a/tests.orig/test_integrations_azure_resources.py b/tests.orig/test_integrations_azure_resources.py deleted file mode 100644 index 49e9963..0000000 --- a/tests.orig/test_integrations_azure_resources.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -Tests for azure-resources integration commands. - -These tests all use mock responses. -""" -from cortexapps_cli.cortex import cli -import os -import responses - -# Since responses are all mocked and no data validation is done by the CLI -- -# we let the API handle validation -- we don't need valid input files. -def _dummy_file(tmp_path): - f = tmp_path / "test_integrations_azure_resources_add.json" - f.write_text("foobar") - return f - -@responses.activate -def test_integrations_azure_resources_add(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration", json={}, status=200) - cli(["integrations", "azure-resources", "add", "-f", str(f)]) - -@responses.activate -def test_integrations_azure_resources_add_multiple(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configurations", json={}, status=200) - cli(["integrations", "azure-resources", "add-multiple", "-f", str(f)]) - -@responses.activate -def test_integrations_azure_resources_delete(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/test", status=200) - cli(["integrations", "azure-resources", "delete", "-a", "test"]) - -@responses.activate -def 
test_integrations_azure_resources_delete_all(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configurations", status=200) - cli(["integrations", "azure-resources", "delete-all"]) - -@responses.activate -def test_integrations_azure_resources_get(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/test", json={}, status=200) - cli(["integrations", "azure-resources", "get", "-a", "test"]) - -@responses.activate -def test_integrations_azure_resources_get_all(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configurations", json={}, status=200) - cli(["integrations", "azure-resources", "get-all"]) - -@responses.activate -def test_integrations_azure_resources_get_default(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/default-configuration", json={}, status=200) - cli(["integrations", "azure-resources", "get-default"]) - -@responses.activate -def test_integrations_azure_resources_update(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/test", json={}, status=200) - cli(["integrations", "azure-resources", "update", "-a", "test", "-f", str(f)]) - -@responses.activate -def test_integrations_azure_resources_validate(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/validate/test", json={}, status=200) - cli(["integrations", "azure-resources", "validate", "-a", "test"]) - -@responses.activate -def test_integrations_azure_resources_validate_all(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configuration/validate", json={}, status=200) - cli(["integrations", "azure-resources", "validate-all"]) diff --git a/tests.orig/test_integrations_coralogix.py b/tests.orig/test_integrations_coralogix.py deleted file mode 100644 
index 31ac364..0000000 --- a/tests.orig/test_integrations_coralogix.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Tests for coralogix integration commands. -""" -from cortexapps_cli.cortex import cli -from string import Template -import json -import os -import pytest -import responses - -coralogix_api_key = json.dumps("fakeKey") - -def _coralogix_input(tmp_path): - f = tmp_path / "test_integrations_coralogix_add.json" - template = Template(""" - { - "alias": "test", - "apiKey": ${coralogix_api_key}, - "isDefault": true, - "region": "US1" - } - """) - content = template.substitute(coralogix_api_key=coralogix_api_key) - f.write_text(content) - return f - -def test_integrations_coralogix_add(tmp_path): - f = _coralogix_input(tmp_path) - - cli(["integrations", "coralogix", "delete-all"]) - cli(["integrations", "coralogix", "add", "-f", str(f)]) - cli(["integrations", "coralogix", "get", "-a", "test"]) - cli(["integrations", "coralogix", "get-all"]) - cli(["integrations", "coralogix", "get-default"]) - - cli(["integrations", "coralogix", "update", "-a", "test", "-f", str(f)]) - cli(["integrations", "coralogix", "delete", "-a", "test"]) - - f = tmp_path / "test_integrations_coralogix_update_multiple.json" - template = Template(""" - { - "configurations": [ - { - "alias": "test", - "apiKey": ${coralogix_api_key}, - "isDefault": true, - "region": "US1" - }, - { - "alias": "test-2", - "apiKey": ${coralogix_api_key}, - "isDefault": true, - "region": "US2" - } - ] - } - """) - content = template.substitute(coralogix_api_key=coralogix_api_key) - f.write_text(content) - cli(["integrations", "coralogix", "add-multiple", "-f", str(f)]) - cli(["integrations", "coralogix", "delete-all"]) - -@responses.activate -def test_integrations_coralogix_validate(tmp_path): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/validate/test", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", 
"coralogix", "validate", "-a", "test"]) - - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configuration/validate", json=[ { 'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}], status=200) - cli(["integrations", "coralogix", "validate-all"]) diff --git a/tests.orig/test_integrations_github.py b/tests.orig/test_integrations_github.py deleted file mode 100644 index 3ce9976..0000000 --- a/tests.orig/test_integrations_github.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Tests for github integration commands. -""" -from cortexapps_cli.cortex import cli -import os -import pytest -import responses - -# Since responses are all mocked and no data validation is done by the CLI -- -# we let the API handle validation -- we don't need valid input files. -def _dummy_file(tmp_path): - f = tmp_path / "test.json" - f.write_text("foobar") - return f - -@responses.activate -def test_integrations_github_add_personal(capsys, tmp_path): - f = _dummy_file(tmp_path) - - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/personal", status=200) - cli(["integrations", "github", "add-personal", "-f", str(f)]) - -@responses.activate -def test_integrations_github_update_personal(capsys, tmp_path): - f = _dummy_file(tmp_path) - - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/personal/pt-001", status=200) - cli(["integrations", "github", "update-personal", "-a", "pt-001", "-f", str(f)]) - -@responses.activate -def test_integrations_github_get_personal(capsys, tmp_path): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/personal/pt-001", status=200) - cli(["integrations", "github", "get-personal", "-a", "pt-001"]) - -@responses.activate -def test_integrations_github_add(capsys, tmp_path): - f = _dummy_file(tmp_path) - - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/app", 
status=200) - cli(["integrations", "github", "add", "-f", str(f)]) - -@responses.activate -def test_integrations_github_get(capsys, tmp_path): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/app/pt-001", status=200) - cli(["integrations", "github", "get", "-a", "pt-001"]) - -@responses.activate -def test_integrations_github_get_all(capsys, tmp_path): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", status=200) - cli(["integrations", "github", "get-all"]) - -@responses.activate -def test_integrations_github_get_default(capsys, tmp_path): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/default-configuration", status=200) - cli(["integrations", "github", "get-default"]) - -@responses.activate -def test_integrations_github_validate(capsys, tmp_path): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/validate/pt-001", status=200) - cli(["integrations", "github", "validate", "-a", "pt-001"]) - -@responses.activate -def test_integrations_github_validate_all(capsys, tmp_path): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/validate", status=200) - cli(["integrations", "github", "validate-all"]) - -@responses.activate -def test_integrations_github_update(capsys, tmp_path): - f = _dummy_file(tmp_path) - - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations/app/pt-001", status=200) - cli(["integrations", "github", "update", "-a", "pt-001", "-f", str(f)]) - -@responses.activate -def test_integrations_github_delete_all(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", status=200) - cli(["integrations", "github", "delete-all"]) diff --git a/tests.orig/test_integrations_github_update.json b/tests.orig/test_integrations_github_update.json deleted file mode 100644 index 
4d85745..0000000 --- a/tests.orig/test_integrations_github_update.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "alias": "github-test-3", - "isDefault": false -} diff --git a/tests.orig/test_integrations_gitlab.py b/tests.orig/test_integrations_gitlab.py deleted file mode 100644 index e7809a9..0000000 --- a/tests.orig/test_integrations_gitlab.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Tests for gitlab integration commands. -""" -from cortexapps_cli.cortex import cli -import os -import sys -from string import Template - -def test_integrations_gitlab(tmp_path): - cli(["integrations", "gitlab", "delete-all"]) - - gitlab_personal_token = os.getenv('GITLAB_PERSONAL_TOKEN') - f = tmp_path / "test_integrations_gitlab_add.json" - template = Template("""{ - "alias": "cortex-test", - "groupNames": [ - ], - "hidePersonalProjects": false, - "isDefault": true, - "personalAccessToken": "${gitlab_personal_token}" - } - """) - content = template.substitute(gitlab_personal_token=gitlab_personal_token) - f.write_text(content) - cli(["integrations", "gitlab", "add", "-f", str(f)]) - - cli(["integrations", "gitlab", "get", "-a", "cortex-test"]) - - cli(["integrations", "gitlab", "get-all"]) - - cli(["integrations", "gitlab", "get-default"]) - - cli(["integrations", "gitlab", "validate", "-a", "cortex-test"]) - - cli(["integrations", "gitlab", "validate-all"]) - - cli(["integrations", "gitlab", "update", "-a", "cortex-test", "-f", "tests/test_integrations_gitlab_update.json"]) - - cli(["integrations", "gitlab", "add-multiple", "-f", "tests/test_integrations_gitlab_add_multiple.json"]) - - cli(["integrations", "gitlab", "delete", "-a", "cortex-test-2"]) - - cli(["integrations", "gitlab", "delete-all"]) diff --git a/tests.orig/test_integrations_gitlab_add_multiple.json b/tests.orig/test_integrations_gitlab_add_multiple.json deleted file mode 100644 index be6ed5c..0000000 --- a/tests.orig/test_integrations_gitlab_add_multiple.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "configurations": [ - { - 
"alias": "cortex-test-2", - "groupNames": [ - ], - "hidePersonalProjects": false, - "isDefault": false, - "personalAccessToken": "bogusToken1" - }, - { - "alias": "cortex-test-3", - "groupNames": [ - ], - "hidePersonalProjects": false, - "isDefault": false, - "personalAccessToken": "bogusToken2" - } - ] -} diff --git a/tests.orig/test_integrations_gitlab_update.json b/tests.orig/test_integrations_gitlab_update.json deleted file mode 100644 index 905a854..0000000 --- a/tests.orig/test_integrations_gitlab_update.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "alias": "cortex-test-1", - "groupNames": [ - ], - "hidePersonalProjects": false, - "host": "null", - "isDefault": true -} diff --git a/tests.orig/test_integrations_incidentio.py b/tests.orig/test_integrations_incidentio.py deleted file mode 100644 index 7ce9d79..0000000 --- a/tests.orig/test_integrations_incidentio.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Tests for incident.io integration commands. - -These tests all use mock responses. -""" -from cortexapps_cli.cortex import cli -import json -import os -import responses - -# Since responses are all mocked and no data validation is done by the CLI -- -# we let the API handle validation -- we don't need valid input files. 
-def _dummy_file(tmp_path): - f = tmp_path / "test_integrations_incidentio_add.json" - f.write_text("foobar") - return f - -@responses.activate -def test_integrations_incidentio_add(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "incidentio", "add", "-f", str(f)]) - -@responses.activate -def test_integrations_incidentio_add_multiple(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "incidentio", "add-multiple", "-f", str(f)]) - -@responses.activate -def test_integrations_incidentio_delete(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", status=200) - cli(["integrations", "incidentio", "delete", "-a", "test"]) - -@responses.activate -def test_integrations_incidentio_delete_all(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", status=200) - cli(["integrations", "incidentio", "delete-all"]) - -@responses.activate -def test_integrations_incidentio_get(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "incidentio", "get", "-a", "test"]) - -@responses.activate -def test_integrations_incidentio_get_all(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 
'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "incidentio", "get-all"]) - -@responses.activate -def test_integrations_incidentio_get_default(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/default-configuration", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "incidentio", "get-default"]) - -@responses.activate -def test_integrations_incidentio_update(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/test", json=[{'alias:': 'test', 'isDefault': json.dumps("true")}], status=200) - cli(["integrations", "incidentio", "update", "-a", "test", "-f", str(f)]) - -@responses.activate -def test_integrations_incidentio_validate(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/validate/test", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", "incidentio", "validate", "-a", "test"]) - -@responses.activate -def test_integrations_incidentio_validate_all(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configuration/validate", json=[ { 'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}], status=200) - cli(["integrations", "incidentio", "validate-all"]) diff --git a/tests.orig/test_integrations_launchdarkly.py b/tests.orig/test_integrations_launchdarkly.py deleted file mode 100644 index 030672a..0000000 --- a/tests.orig/test_integrations_launchdarkly.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Tests for launchdarkly integration commands. 
-""" -from cortexapps_cli.cortex import cli -from string import Template -import json -import os -import pytest -import responses - -launchdarkly_api_key = json.dumps("fakeKey") - -def _launchdarkly_input(tmp_path): - f = tmp_path / "test_integrations_launchdarkly_add.json" - template = Template(""" - { - "alias": "test", - "apiKey": ${launchdarkly_api_key}, - "environment": "DEFAULT", - "isDefault": true - } - """) - content = template.substitute(launchdarkly_api_key=launchdarkly_api_key) - f.write_text(content) - return f - -def test_integrations_launchdarkly_add(tmp_path): - f = _launchdarkly_input(tmp_path) - - cli(["integrations", "launchdarkly", "delete-all"]) - cli(["integrations", "launchdarkly", "add", "-f", str(f)]) - cli(["integrations", "launchdarkly", "get", "-a", "test"]) - cli(["integrations", "launchdarkly", "get-all"]) - cli(["integrations", "launchdarkly", "get-default"]) - - cli(["integrations", "launchdarkly", "update", "-a", "test", "-f", str(f)]) - cli(["integrations", "launchdarkly", "delete", "-a", "test"]) - - f = tmp_path / "test_integrations_launchdarkly_update_multiple.json" - template = Template(""" - { - "configurations": [ - { - "alias": "test", - "apiKey": ${launchdarkly_api_key}, - "environment": "DEFAULT", - "isDefault": true - }, - { - "alias": "test-2", - "apiKey": ${launchdarkly_api_key}, - "environment": "FEDERAL", - "isDefault": false - } - ] - } - """) - content = template.substitute(launchdarkly_api_key=launchdarkly_api_key) - f.write_text(content) - cli(["integrations", "launchdarkly", "add-multiple", "-f", str(f)]) - cli(["integrations", "launchdarkly", "delete-all"]) - -@responses.activate -def test_integrations_launchdarkly_validate(tmp_path): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/validate/test", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", "launchdarkly", "validate", "-a", "test"]) - - 
responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configuration/validate", json=[ { 'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}], status=200) - cli(["integrations", "launchdarkly", "validate-all"]) diff --git a/tests.orig/test_integrations_newrelic.py b/tests.orig/test_integrations_newrelic.py deleted file mode 100644 index 4f62368..0000000 --- a/tests.orig/test_integrations_newrelic.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Tests for newrelic integration commands. - -These tests all use mock responses. -""" -from cortexapps_cli.cortex import cli -import json -import responses -import os - -# Since responses are all mocked and no data validation is done by the CLI -- -# we let the API handle validation -- we don't need valid input files. -def _dummy_file(tmp_path): - f = tmp_path / "test_integrations_newrelic_add.json" - f.write_text("foobar") - return f - -@responses.activate -def test_integrations_newrelic_add(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "newrelic", "add", "-f", str(f)]) - -@responses.activate -def test_integrations_newrelic_add_multiple(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "newrelic", "add-multiple", "-f", str(f)]) - -@responses.activate -def test_integrations_newrelic_delete(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", status=200) - cli(["integrations", "newrelic", "delete", "-a", "test"]) - -@responses.activate -def 
test_integrations_newrelic_delete_all(): - responses.add(responses.DELETE, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", status=200) - cli(["integrations", "newrelic", "delete-all"]) - -@responses.activate -def test_integrations_newrelic_get(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "newrelic", "get", "-a", "test"]) - -@responses.activate -def test_integrations_newrelic_get_all(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "newrelic", "get-all"]) - -@responses.activate -def test_integrations_newrelic_get_default(): - responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/default-configuration", json=[{'accountId': 123, 'alias:': 'test', 'isDefault': json.dumps("true"), 'personalKey': 'xxxx', 'region': 'US'}], status=200) - cli(["integrations", "newrelic", "get-default"]) - -@responses.activate -def test_integrations_newrelic_update(tmp_path): - f = _dummy_file(tmp_path) - responses.add(responses.PUT, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/test", json=[{'alias:': 'test', 'isDefault': json.dumps("true")}], status=200) - cli(["integrations", "newrelic", "update", "-a", "test", "-f", str(f)]) - -@responses.activate -def test_integrations_newrelic_validate(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/validate/test", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", "newrelic", "validate", "-a", "test"]) - -@responses.activate -def test_integrations_newrelic_validate_all(): - 
responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configuration/validate", json=[ { 'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}], status=200) - cli(["integrations", "newrelic", "validate-all"]) diff --git a/tests.orig/test_integrations_pagerduty.py b/tests.orig/test_integrations_pagerduty.py deleted file mode 100644 index 334fe0b..0000000 --- a/tests.orig/test_integrations_pagerduty.py +++ /dev/null @@ -1,26 +0,0 @@ -""" -Tests for pagerduty integration commands. -""" -from cortexapps_cli.cortex import cli -from string import Template -import os - -pagerduty_token = os.getenv('PAGERDUTY_TOKEN') - -def test_integrations_pagerduty(tmp_path): - f = tmp_path / "test_integrations_pagerduty_add.json" - template = Template(""" - { - "isTokenReadonly": true, - "token": "${pagerduty_token}" - } - """) - content = template.substitute(pagerduty_token=pagerduty_token) - f.write_text(content) - - cli(["integrations", "pagerduty", "delete"]) - cli(["integrations", "pagerduty", "add", "-f", str(f)]) - cli(["integrations", "pagerduty", "get"]) - cli(["integrations", "pagerduty", "validate"]) - cli(["integrations", "pagerduty", "delete"]) - diff --git a/tests.orig/test_integrations_prometheus.py b/tests.orig/test_integrations_prometheus.py deleted file mode 100644 index b0c7730..0000000 --- a/tests.orig/test_integrations_prometheus.py +++ /dev/null @@ -1,72 +0,0 @@ -""" -Tests for prometheus integration commands. 
-""" -from cortexapps_cli.cortex import cli -from string import Template -import os - -prometheus_host = os.getenv('PROMETHEUS_HOST') -prometheus_password = os.getenv('PROMETHEUS_PASSWORD') -prometheus_user = os.getenv('PROMETHEUS_USER') - -def _prometheus_input(tmp_path): - f = tmp_path / "test_integrations_prometheus_add.json" - template = Template(""" - { - "alias": "cortex-test", - "host": "${prometheus_host}", - "isDefault": true, - "password": "${prometheus_password}", - "prometheusTenantId": "string", - "username": "${prometheus_user}" - } - """) - content = template.substitute(prometheus_host=prometheus_host, prometheus_password=prometheus_password, prometheus_user=prometheus_user) - f.write_text(content) - return f - - cli(["integrations", "prometheus", "delete-all"]) - - f = _prometheus_input(tmp_path) - cli(["integrations", "prometheus", "add", "-f", str(f)]) - - cli(["integrations", "prometheus", "get", "-a", "cortex-test"]) - - cli(["integrations", "prometheus", "get-all"]) - - cli(["integrations", "prometheus", "get-default"]) - - f = _prometheus_input(tmp_path) - cli(["integrations", "prometheus", "update", "-a", "cortex-test", "-f", str(f)]) - - cli(["integrations", "prometheus", "delete", "-a", "cortex-test"]) - - f = tmp_path / "test_integrations_prometheus_add_multiple.json" - template = Template(""" - { - "configurations": [ - { - "alias": "cortex-test-2", - "host": "${prometheus_host}", - "isDefault": false, - "password": "${prometheus_password}", - "prometheusTenantId": "string", - "username": "${prometheus_user}" - }, - { - "alias": "cortex-test-3", - "host": "${prometheus_host}", - "isDefault": false, - "password": "${prometheus_password}", - "prometheusTenantId": "string", - "username": "${prometheus_user}" - } - ] - } - """) - content = template.substitute(prometheus_host=prometheus_host, prometheus_password=prometheus_password, prometheus_user=prometheus_user) - f.write_text(content) - cli(["integrations", "prometheus", "add-multiple", 
"-f", str(f)]) - - cli(["integrations", "prometheus", "delete-all"]) - diff --git a/tests.orig/test_integrations_sonarqube.py b/tests.orig/test_integrations_sonarqube.py deleted file mode 100644 index b84c7d3..0000000 --- a/tests.orig/test_integrations_sonarqube.py +++ /dev/null @@ -1,74 +0,0 @@ -""" -Tests for sonarqube integration commands. -""" -from cortexapps_cli.cortex import cli -from string import Template -import json -import os -import pytest -import responses - -sonarqube_host = os.getenv('SONARQUBE_HOST') -sonarqube_personal_token = os.getenv('SONARQUBE_PERSONAL_TOKEN') - -def _sonarqube_input(tmp_path): - f = tmp_path / "test_integrations_sonarqube_add.json" - template = Template(""" - { - "alias": "cortex-test", - "host": "${sonarqube_host}", - "isDefault": true, - "token": "${sonarqube_personal_token}" - } - """) - content = template.substitute(sonarqube_host=sonarqube_host, sonarqube_personal_token=sonarqube_personal_token) - f.write_text(content) - return f - -def test_integrations_sonarqube(tmp_path): - cli(["integrations", "sonarqube", "delete-all"]) - - f = _sonarqube_input(tmp_path) - cli(["integrations", "sonarqube", "add", "-f", str(f)]) - - cli(["integrations", "sonarqube", "get", "-a", "cortex-test"]) - - cli(["integrations", "sonarqube", "get-all"]) - - cli(["integrations", "sonarqube", "get-default"]) - - f = _sonarqube_input(tmp_path) - cli(["integrations", "sonarqube", "update", "-a", "cortex-test", "-f", str(f)]) - - cli(["integrations", "sonarqube", "delete", "-a", "cortex-test"]) - - f = tmp_path / "test_integrations_sonarqube_add_multiple.json" - template = Template(""" - { - "configurations": [ - { - "alias": "cortex-test-2", - "host": "${sonarqube_host}", - "isDefault": true, - "token": "${sonarqube_personal_token}" - }, - { - "alias": "cortex-test-3", - "host": "${sonarqube_host}", - "isDefault": true, - "token": "${sonarqube_personal_token}" - } - ] - } - """) - content = template.substitute(sonarqube_host=sonarqube_host, 
sonarqube_personal_token=sonarqube_personal_token) - f.write_text(content) - cli(["integrations", "sonarqube", "add-multiple", "-f", str(f)]) - -@responses.activate -def test_integrations_sonarqube_validate(): - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/validate/cortex-test", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", "sonarqube", "validate", "-a", "cortex-test"]) - - responses.add(responses.POST, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configuration/validate", json={'alias': 'test', 'isValid': json.dumps("true"), 'message': 'someMessage'}, status=200) - cli(["integrations", "sonarqube", "validate-all"]) diff --git a/tests.orig/test_invalid-service.yaml b/tests.orig/test_invalid-service.yaml deleted file mode 100644 index ab87147..0000000 --- a/tests.orig/test_invalid-service.yaml +++ /dev/null @@ -1,8 +0,0 @@ -openapi: 3.0.1 -info: - title: Invalid Service to test dryrun - x-cortex-bag: invalid-service - x-cortex-type: service - x-cortex-owners: - - type: EMAIL - email: snoop.dogg@cortex.io diff --git a/tests.orig/test_ip_allowlist.py b/tests.orig/test_ip_allowlist.py deleted file mode 100644 index 72f910c..0000000 --- a/tests.orig/test_ip_allowlist.py +++ /dev/null @@ -1,44 +0,0 @@ -""" -Tests for ip-allowlist commands. 
-""" -from cortexapps_cli.cortex import cli -import requests -import pytest -from string import Template - -def _ip_allowlist_input(tmp_path): - ip_address = requests.get("https://ip.me").text.strip() - f = tmp_path / "test_ip_allowlist_input.json" - template = Template(""" - { - "entries": [ - { - "address": "${ip_address}", - "description": "string" - } - ] - } - """) - content = template.substitute(ip_address=ip_address) - f.write_text(content) - return f - -def test(capsys, tmp_path): - cli(["ip-allowlist", "get"]) - - f = _ip_allowlist_input(tmp_path) - cli(["ip-allowlist", "validate", "-f", str(f)]) - - f = _ip_allowlist_input(tmp_path) - cli(["ip-allowlist", "replace", "-f", str(f)]) - - cli(["ip-allowlist", "replace", "-f", "tests/test_ip_allowlist_empty.json"]) - - with pytest.raises(SystemExit) as excinfo: - cli(["ip-allowlist", "validate", "-f", "tests/test_ip_allowlist_invalid.json"]) - out, err = capsys.readouterr() - response = json.loads(out) - #print(err) - #assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" - assert out == "Bad Request" - assert excinfo.value.code == 400 diff --git a/tests.orig/test_ip_allowlist_empty.json b/tests.orig/test_ip_allowlist_empty.json deleted file mode 100644 index 3fde4b2..0000000 --- a/tests.orig/test_ip_allowlist_empty.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "entries": [ - ] -} diff --git a/tests.orig/test_ip_allowlist_invalid.json b/tests.orig/test_ip_allowlist_invalid.json deleted file mode 100644 index bc9c040..0000000 --- a/tests.orig/test_ip_allowlist_invalid.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "entries": [ - { - "addressList": "10.0.0.1", - "description": "string" - } - ] - } diff --git a/tests.orig/test_packages.py b/tests.orig/test_packages.py deleted file mode 100644 index 3a50533..0000000 --- a/tests.orig/test_packages.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -Tests for packages commands. 
-""" -from cortexapps_cli.cortex import cli - -def test_packages(): - cli(["packages", "go", "upload", "-t", "test-service", "-f", "tests/test_packages_go.sum"]) - - cli(["packages", "java", "upload-single", "-t", "test-service", "-f", "tests/test_packages_java_single.json"]) - - cli(["packages", "java", "upload-multiple", "-t", "test-service", "-f", "tests/test_packages_java_multiple.json"]) - - cli(["packages", "python", "upload-pipfile", "-t", "test-service", "-f", "tests/test_packages_python_pipfile.lock"]) - - cli(["packages", "python", "upload-requirements", "-t", "test-service", "-f", "tests/test_packages_python_requirements.txt"]) - - cli(["packages", "node", "upload-package", "-t", "test-service", "-f", "tests/test_packages_node_package.json"]) - - cli(["packages", "node", "upload-package-lock", "-t", "test-service", "-f", "tests/test_packages_node_package_lock.json"]) - - cli(["packages", "node", "upload-yarn-lock", "-t", "test-service", "-f", "tests/test_packages_node_yarn.lock"]) - - cli(["packages", "list", "-t", "test-service"]) - - cli(["packages", "java", "delete", "-t", "test-service", "-n", "io.cortex.teams"]) - - cli(["packages", "python", "delete", "-t", "test-service", "-n", "cycler"]) - - cli(["packages", "node", "delete", "-t", "test-service", "-n", "inter-angular"]) - - cli(["packages", "list", "-t", "test-service"]) - - cli(["packages", "nuget", "upload-packages-lock", "-t", "test-service", "-f", "tests/test_packages_nuget_packages_lock.json"]) - - cli(["packages", "nuget", "upload-csproj", "-t", "test-service", "-f", "tests/test_packages_nuget.csproj"]) diff --git a/tests.orig/test_packages_go.sum b/tests.orig/test_packages_go.sum deleted file mode 100644 index 6fc50b0..0000000 --- a/tests.orig/test_packages_go.sum +++ /dev/null @@ -1,2 +0,0 @@ -github.com/gofrs/uuid v3.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= -github.com/cortex.io/catalog v4.5.6+incompatible/go.mod 
h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= diff --git a/tests.orig/test_packages_java_multiple.json b/tests.orig/test_packages_java_multiple.json deleted file mode 100644 index e2e0c14..0000000 --- a/tests.orig/test_packages_java_multiple.json +++ /dev/null @@ -1,10 +0,0 @@ -[ - { - "name": "io.cortex.catalog", - "version": "4.5.6" - }, - { - "name": "io.cortex.teams", - "version": "3.3.3" - } -] diff --git a/tests.orig/test_packages_java_single.json b/tests.orig/test_packages_java_single.json deleted file mode 100644 index d89a92f..0000000 --- a/tests.orig/test_packages_java_single.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "io.cortex.scorecards", - "version": "1.2.3" -} diff --git a/tests.orig/test_packages_node_package.json b/tests.orig/test_packages_node_package.json deleted file mode 100644 index 31e2e7d..0000000 --- a/tests.orig/test_packages_node_package.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "name": "app", - "version": "0.0.0", - "private": true, - "scripts": { - "start": "node ./bin/www" - }, - "dependencies": { - "clean-css": "^4.1.11", - "constantinople": "^3.1.1", - "cookie-parser": "~1.4.4", - "debug": "~2.6.9", - "express": "~4.16.1", - "http-errors": "~1.6.3", - "lorem-ipsum": "^2.0.3", - "md5": "^2.2.1", - "mersenne-twister": "^1.1.0", - "morgan": "~1.9.1", - "pug": "^2.0.4", - "uglify-js": "^2.6.0" - } -} diff --git a/tests.orig/test_packages_node_package_lock.json b/tests.orig/test_packages_node_package_lock.json deleted file mode 100644 index b32fe75..0000000 --- a/tests.orig/test_packages_node_package_lock.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "name": "intern-angular", - "version": "1.0.0", - "lockfileVersion": 1, - "dependencies": { - "@angular/animations": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-4.2.6.tgz", - "integrity": "sha1-nZyAoRmwwDaTy9I7uvcosVMf/8c=" - }, - "@angular/common": { - "version": "4.2.6", - "resolved": 
"https://registry.npmjs.org/@angular/common/-/common-4.2.6.tgz", - "integrity": "sha1-IQrOS9JON1+LQbpS/rNLGKiH1do=" - } - } -} diff --git a/tests.orig/test_packages_node_yarn.lock b/tests.orig/test_packages_node_yarn.lock deleted file mode 100644 index 980810c..0000000 --- a/tests.orig/test_packages_node_yarn.lock +++ /dev/null @@ -1,19 +0,0 @@ -"@types/babel-types@*", "@types/babel-types@^7.0.0": - version "7.0.7" - resolved "https://registry.yarnpkg.com/@types/babel-types/-/babel-types-7.0.7.tgz#667eb1640e8039436028055737d2b9986ee336e3" - integrity sha512-dBtBbrc+qTHy1WdfHYjBwRln4+LWqASWakLHsWHR2NWHIFkv4W3O070IGoGLEBrJBvct3r0L1BUPuvURi7kYUQ== - -"@types/babylon@^6.16.2": - version "6.16.5" - resolved "https://registry.yarnpkg.com/@types/babylon/-/babylon-6.16.5.tgz#1c5641db69eb8cdf378edd25b4be7754beeb48b4" - integrity sha512-xH2e58elpj1X4ynnKp9qSnWlsRTIs6n3tgLGNfwAGHwePw0mulHQllV34n0T25uYSu1k0hRKkWXF890B1yS47w== - dependencies: - "@types/babel-types" "*" - -accepts@~1.3.5: - version "1.3.7" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" - integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== - dependencies: - mime-types "~2.1.24" - negotiator "0.6.2" diff --git a/tests.orig/test_packages_nuget.csproj b/tests.orig/test_packages_nuget.csproj deleted file mode 100644 index 72b9a92..0000000 --- a/tests.orig/test_packages_nuget.csproj +++ /dev/null @@ -1,22 +0,0 @@ - - - - - 0.20.0 - - - 0.3.0 - runtime; build; native; contentfiles; analyzers - all - - - 7.1.1 - - - 1.1.1 - - - 4.5.0 - - - diff --git a/tests.orig/test_packages_nuget_packages_lock.json b/tests.orig/test_packages_nuget_packages_lock.json deleted file mode 100644 index 3b09240..0000000 --- a/tests.orig/test_packages_nuget_packages_lock.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "version": 1, - "dependencies": { - ".NETCoreApp,Version=v3.1": { - "Microsoft.NETFramework.ReferenceAssemblies": { 
- "type": "Direct", - "requested": "[1.0.0, )", - "resolved": "1.0.0", - "contentHash": "7D2TMufjGiowmt0E941kVoTIS+GTNzaPopuzM1/1LSaJAdJdBrVP0SkZW7AgDd0a2U1DjsIeaKG1wxGVBNLDMw==" - }, - "Newtonsoft.Json": { - "type": "Direct", - "requested": "[12.0.3, )", - "resolved": "12.0.3", - "contentHash": "6mgjfnRB4jKMlzHSl+VD+oUc1IebOZabkbyWj2RiTgWwYPPuaK1H97G1sHqGwPlS5npiF5Q0OrxN1wni2n5QWg==" - } - } - } -} diff --git a/tests.orig/test_packages_python_pipfile.lock b/tests.orig/test_packages_python_pipfile.lock deleted file mode 100644 index 983cd83..0000000 --- a/tests.orig/test_packages_python_pipfile.lock +++ /dev/null @@ -1,59 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "bb57e0d7853b45999e47c163c46b95bc2fde31c527d8d7b5b5539dc979444a6d" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.7" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "certifi": { - "hashes": [ - "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", - "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" - ], - "index": "pypi", - "version": "==2022.12.7" - }, - "chardet": { - "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" - ], - "version": "==3.0.4" - }, - "idna": { - "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" - ], - "version": "==2.8" - }, - "requests": { - "hashes": [ - "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", - "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" - ], - "index": "pypi", - "version": "==2.21.0" - }, - "urllib3": { - "hashes": [ - "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", - 
"sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_version < '4'", - "version": "==1.24.3" - } - }, - "develop": {} -} diff --git a/tests.orig/test_packages_python_requirements.txt b/tests.orig/test_packages_python_requirements.txt deleted file mode 100644 index 1d7bc68..0000000 --- a/tests.orig/test_packages_python_requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -contourpy==1.0.6 - # via matplotlib -cycler==0.11.0 - # via matplotlib -fonttools==4.43.0 - # via matplotlib -kiwisolver==1.4.4 - # via matplotlib diff --git a/tests.orig/test_plugins.json b/tests.orig/test_plugins.json deleted file mode 100644 index 3150e65..0000000 --- a/tests.orig/test_plugins.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "blob": "
", - "contexts": [ - { - "type": "GLOBAL" - }, - { - "entityFilter": { - "type": "SERVICE_FILTER" - }, - "type": "ENTITY" - } - ], - "description": "Just testin' plugin uploads", - "isDraft": false, - "minimumRoleRequired": "VIEWER", - "name": "My Test Plugin", - "tag": "my-test-plugin" -} - diff --git a/tests.orig/test_plugins.py b/tests.orig/test_plugins.py deleted file mode 100644 index 53934b0..0000000 --- a/tests.orig/test_plugins.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -Tests for plugins commands. -""" -from cortexapps_cli.cortex import cli -import pytest - -@pytest.mark.skip(reason="Needs fix for CET-8598") -def test(capsys): - cli(["plugins", "get"]) - out, err = capsys.readouterr() - if (str(out).find('{"tag":"my-test-plugin"') != -1): - cli(["plugins", "delete", "-t", "my-test-plugin"]) - cli(["plugins", "create", "-f", "tests/test_plugins.json"]) - - cli(["plugins", "get"]) - - cli(["plugins", "update", "-t", "my-test-plugin", "-f", "tests/test_plugins_update.json"]) - - cli(["plugins", "get-by-tag", "-t", "my-test-plugin"]) - - cli(["plugins", "delete", "-t", "my-test-plugin"]) diff --git a/tests.orig/test_plugins_update.json b/tests.orig/test_plugins_update.json deleted file mode 100644 index 2bbb1b5..0000000 --- a/tests.orig/test_plugins_update.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "blob": "
", - "contexts": [ - { - "type": "GLOBAL" - }, - { - "entityFilter": { - "type": "SERVICE_FILTER" - }, - "type": "ENTITY" - } - ], - "description": "Just testin' plugin uploads", - "isDraft": false, - "minimumRoleRequired": "VIEWER", - "name": "My Test Plugin" -} diff --git a/tests.orig/test_queries.json b/tests.orig/test_queries.json deleted file mode 100644 index 55882fc..0000000 --- a/tests.orig/test_queries.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "query": "tag = \"cli-test-service\" and custom(\"testField\") != null" -} diff --git a/tests.orig/test_queries.py b/tests.orig/test_queries.py deleted file mode 100644 index 0e39781..0000000 --- a/tests.orig/test_queries.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Tests for queries commands. -""" -from cortexapps_cli.cortex import cli -from datetime import datetime -from datetime import timedelta -import json -import pytest -from string import Template -import sys -import subprocess - -@pytest.mark.skip(reason="Does not reliably complete within 5 minutes; will check with engineering for suggestions.") -def test_queries_run_json(tmp_path): - today = datetime.now().strftime("%m-%d-%Y-%H-%M-%S") - - f = tmp_path / "cql.json" - template = Template("""{ - "query": "tag = \\"cli-test-service\\" and custom(\\"today\\") = \\"${today}\\"" - }""") - content = template.substitute(today=today) - f.write_text(content) - - f1 = tmp_path / "custom-data-query-1.json" - template = Template(""" - { - "key": "today", - "value": "${today}" - } - """) - custom_content = template.substitute(today=today) - f1.write_text(custom_content) - - cli(["custom-data", "add", "-t", "cli-test-service", "-f", str(f1)]) - cli(["-d", "queries", "run", "-w", "-x", "300", "-f", str(f)]) - -@pytest.mark.skip(reason="Does not reliably complete within 5 minutes; will check with engineering for suggestions.") -def test_queries_run_text(tmp_path): - today = datetime.now() - yesterday = today - timedelta(days = 1) - yesterday = 
yesterday.strftime("%m-%d-%Y-%H-%M-%S") - - f = tmp_path / "cql.txt" - template = Template(""" - tag = "cli-test-service" and custom("yesterday") = "${yesterday}" - """) - content = template.substitute(yesterday=yesterday) - f.write_text(content) - - f1 = tmp_path / "custom-data-query-2.json" - template = Template(""" - { - "key": "yesterday", - "value": "${yesterday}" - } - """) - content = template.substitute(yesterday=yesterday) - f1.write_text(content) - - cli(["custom-data", "add", "-t", "cli-test-service", "-f", str(f1)]) - cli(["queries", "run", "-w", "-x", "300", "-f", str(f)]) - -# Verify timeout handling. If CQL query completes in 2 seconds, this test -# could fail. Could probably put in try/catch stanza. -def test_queries_run_timeout(): - with pytest.raises(SystemExit) as excinfo: - cli(["queries", "run", "-w", "-x", "2", "-f", "tests/test_queries.txt"]) diff --git a/tests.orig/test_queries.txt b/tests.orig/test_queries.txt deleted file mode 100644 index d44e612..0000000 --- a/tests.orig/test_queries.txt +++ /dev/null @@ -1 +0,0 @@ -tag = "test-service" and custom("foo") = "bar" diff --git a/tests.orig/test_resource_definitions.py b/tests.orig/test_resource_definitions.py deleted file mode 100644 index a659c29..0000000 --- a/tests.orig/test_resource_definitions.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -Tests for resource-definitions commands. -""" -from cortexapps_cli.cortex import cli -import json -import sys - -def test_resource_definitions(capsys): - # Delete resource definition if it already exists. - cli(["resource-definitions", "list"]) - out, err = capsys.readouterr() - # Maybe a cleaner way to do this with json object? 
- if (str(out).find('{"type":"test-resource-definition"') != -1): - cli(["resource-definitions", "delete", "-t", "test-resource-definition"]) - cli(["resource-definitions", "create", "-f", "tests/test-resource-definition.json"]) - - cli(["resource-definitions", "list"]) - - cli(["resource-definitions", "get", "-t", "test-resource-definition"]) - - cli(["resource-definitions", "update", "-t", "test-resource-definition", "-f", "tests/test-resource-definition-update.json"]) diff --git a/tests.orig/test_scorecards.py b/tests.orig/test_scorecards.py deleted file mode 100644 index d7445e8..0000000 --- a/tests.orig/test_scorecards.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -Tests for scorecards commands. -""" -from cortexapps_cli.cortex import cli -import json - -def test_scorecards(): - cli(["scorecards", "create", "-f", "tests/test_scorecards.yaml"]) - - cli(["scorecards", "list"]) - - cli(["scorecards", "shield", "-s", "test-scorecard", "-t", "test-service"]) - - cli(["scorecards", "get", "-t", "test-scorecard"]) - - cli(["scorecards", "descriptor", "-t", "test-scorecard"]) - - cli(["scorecards", "next-steps", "-t", "test-scorecard", "-e", "test-service"]) - - # Not sure if we can run this cli right away. Newly-created Scorecard might not be evaluated yet. - # 2024-05-06, additionally now blocked by CET-8882 - # cli(["scorecards", "scores", "-t", "test-scorecard", "-e", "test-service"]) - - cli(["scorecards", "scores", "-t", "test-scorecard"]) - -def test_scorecards_drafts(capsys): - cli(["scorecards", "create", "-f", "tests/test_scorecards_draft.yaml"]) - # Only capturing this so it doesn't show up in next call to capsys. 
- out, err = capsys.readouterr() - - cli(["scorecards", "list", "-s"]) - out, err = capsys.readouterr() - - out = json.loads(out) - assert any(scorecard['tag'] == 'test-scorecard-draft' for scorecard in out['scorecards']) diff --git a/tests.orig/test_scorecards.yaml b/tests.orig/test_scorecards.yaml deleted file mode 100644 index d9bd412..0000000 --- a/tests.orig/test_scorecards.yaml +++ /dev/null @@ -1,21 +0,0 @@ -tag: test-scorecard -name: Test Scorecard -description: Used to test Cortex CLI -draft: false -ladder: - name: Default Ladder - levels: - - name: You Made It - rank: 1 - description: "\"If you ain't first, you're last. -- Ricky Bobby\" -- Scott Mullin" - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("testField") != null - weight: 1 - level: You Made It - filter: - category: SERVICE -filter: - query: 'entity_descriptor.info.`x-cortex-tag` = "test-service"' - category: SERVICE diff --git a/tests.orig/test_scorecards_draft.yaml b/tests.orig/test_scorecards_draft.yaml deleted file mode 100644 index c16e9c1..0000000 --- a/tests.orig/test_scorecards_draft.yaml +++ /dev/null @@ -1,19 +0,0 @@ -tag: test-scorecard-draft -name: Test Scorecard Draft -description: Used to test Cortex CLI -draft: true -ladder: - name: Default Ladder - levels: - - name: You Made It - rank: 1 - description: "\"If you ain't first, you're last. -- Ricky Bobby\" -- Scott Mullin" - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("testField") != null - weight: 1 - level: You Made It -filter: - query: entity_descriptor.info.`x-cortex-tag` = "cli-test-service" - category: SERVICE diff --git a/tests.orig/test_stdin.py b/tests.orig/test_stdin.py deleted file mode 100644 index 2ac45fc..0000000 --- a/tests.orig/test_stdin.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -Tests for stdin input. 
-""" -import subprocess - -def test_stdin_input(capsys): - cat_process = subprocess.Popen(['cat', 'tests/test_catalog_create_service.yaml'], stdout=subprocess.PIPE) - cortex_process = subprocess.Popen(['cortexapps_cli/cortex.py', 'catalog', 'create','-f-'],stdin=cat_process.stdout, stdout=subprocess.PIPE) - out, err = cortex_process.communicate() - rc=cortex_process.wait() - assert rc == 0, "catalog test with stdin should succeed" diff --git a/tests.orig/test_teams.py b/tests.orig/test_teams.py deleted file mode 100644 index dafe20e..0000000 --- a/tests.orig/test_teams.py +++ /dev/null @@ -1,49 +0,0 @@ -""" -Tests for teams commands. -""" -from cortexapps_cli.cortex import cli - -import json -import sys - -# Deleted this test for several reasons: -# -# 1. It's failing in release.com environments. -# 2. There appears to be a bug where teams created using the teams API are not immediately -# avaialble from teams list API. -# 3. There are plans to deprecate the teams API and manage everything with the catalog API. -# 4. You can create Cortex-managed teams with the catalog API. 
-# -# def test_teams_create(capsys): -# cli(["teams", "list"]) -# out, err = capsys.readouterr() -# json_data = json.loads(out) -# -# cli(["catalog", "list"]) -# out, err = capsys.readouterr() -# catalog_json_data = json.loads(out) -# -# sys.stdout.write(str(json_data)) -# -# if any(team['teamTag'] == 'cli-test-team' for team in json_data['teams']): -# sys.stdout.write("deleting cli-test-team") -# cli(["teams", "delete", "-t", "cli-test-team"]) -# -# if any(entity['tag'] == 'cli-test-team' for entity in catalog_json_data['entities']): -# sys.stdout.write("deleting catalog cli-test-team") -# cli(["catalog", "delete", "-t", "cli-test-team"]) -# -# cli(["-d", "teams", "create", "-f", "tests/test_teams.yaml"]) - -def test_teams_get(): - cli(["teams", "get", "-t", "test-team-1"]) - -def test_teams_list(): - cli(["teams", "list"]) - -def test_teams_archive(): - cli(["teams", "archive", "-t", "test-team-1"]) - cli(["teams", "unarchive", "-t", "test-team-1"]) - -def test_teams_update_metadata(): - cli(["teams", "update-metadata", "-t", "test-team-2", "-f", "tests/test_teams_update.json"]) diff --git a/tests.orig/test_teams.yaml b/tests.orig/test_teams.yaml deleted file mode 100644 index 29924a4..0000000 --- a/tests.orig/test_teams.yaml +++ /dev/null @@ -1,42 +0,0 @@ -{ - "teamTag": "cli-test-team", - "metadata": { - "name": "Stanford", - "description": "Stanford Cardinal 1998 Final Four Team", - "summary": null - }, - "links": [], - "slackChannels": [], - "additionalMembers": [ - { - "name": "Mark Madsen", - "email": "mark.madsen@cortex.io", - "description": "forward" - }, - { - "name": "Kris Weems", - "email": "kris.weems@cortex.io", - "description": "shooting guard" - }, - { - "name": "Tim Young", - "email": "tim.young@cortex.io", - "description": "center" - }, - { - "name": "Peter Sauer", - "email": "peter.sauer@cortex.io", - "description": "forward" - }, - { - "name": "Arthur Lee", - "email": "arthur.lee@cortex.io", - "description": "point guard" - } - ], - 
"isArchived": false, - "cortexTeam": { - "members": [] - }, - "type": "CORTEX" -} diff --git a/tests.orig/test_teams_update.json b/tests.orig/test_teams_update.json deleted file mode 100644 index aa5b528..0000000 --- a/tests.orig/test_teams_update.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "metadata": { - "name": "Stanford", - "description": "Stanford Cardinal 2023 Final Four Team", - "summary": null - }, - "links": [], - "slackChannels": [], - "additionalMembers": [ - { - "name": "Steph Curry", - "email": "steph.curry@cortex.io", - "description": "guard" - } - ], - "cortexTeam":{ - "members": [] - }, - "type": "CORTEX" -} diff --git a/tests.orig2/__init__.py b/tests.orig2/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests.orig2/common.py b/tests.orig2/common.py deleted file mode 100644 index c6f2b4a..0000000 --- a/tests.orig2/common.py +++ /dev/null @@ -1,62 +0,0 @@ -from cortex import cli -#from cortexapps_cli.cortex import cli - -from contextlib import redirect_stdout -from datetime import datetime -from datetime import timedelta -from datetime import timezone -from github import Auth -from github import Github -from string import Template -from types import SimpleNamespace -from unittest import mock -import io -import json -import os -import pytest -import random -import re -import requests -import sys -import tempfile -import textwrap -import time -import yaml -from feature_flag_check import * - -def cli_command(capsys, args, output_type="json"): - args = ["-q"] + args - - try: - cli(args) - except: - captured = capsys.readouterr() - print("cli_command: error: " + captured.err) - - out, err = capsys.readouterr() - - if output_type == "json": - return json.loads(out) - elif output_type == "text": - return out - -def today(): - return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S") - -def yesterday(): - today = datetime.now(timezone.utc) - yesterday = today - timedelta(days = 1) - return yesterday.strftime("%Y-%m-%dT%H:%M:%S") 
- -def packages(capsys, packageCommand, packageType, version, name, tag): - response = cli_command(capsys, ["packages", "list", "-t", tag]) - assert any(package['packageType'] == packageType and - package['version'] == version and - package['name'] == name - for package in response), "Should find " + packageType + " package with name " + name + " and version " + version + " for entity " + tag - - cli(["packages", packageCommand, "delete", "-t", tag, "-n", name]) - response = cli_command(capsys, ["packages", "list", "-t", tag]) - assert not any(package['packageType'] == packageType and - package['name'] == name - for package in response), "Should not find " + packageType + " package with name " + name diff --git a/tests.orig2/cortex_github.py b/tests.orig2/cortex_github.py deleted file mode 100644 index 95cb701..0000000 --- a/tests.orig2/cortex_github.py +++ /dev/null @@ -1,130 +0,0 @@ -from common import * - -class CortexGithub: - def __init__(self, - url=os.getenv('GH_URL'), - org=os.getenv('GH_ORG'), - repo_name=os.getenv('GH_REPO'), - cortex_alias=os.getenv('CORTEX_GH_ALIAS'), - webhook_url=os.getenv('CORTEX_GH_WEBHOOK_URL')): - - auth = Auth.Token(os.getenv('GH_PAT')) - g = Github(base_url=url, auth=auth) - - organization = g.get_organization(org) - if not any(repo.name == repo_name for repo in organization.get_repos()): - organization.create_repo(repo_name, private=True) - - repo = organization.get_repo(repo_name) - - self.org = org - self.alias = cortex_alias - self.repo = repo - self.webhook_url = webhook_url - - def delete_personal_configuration(self): - output = io.StringIO() - with redirect_stdout(output): - cli(["-q", "integrations", "github", "get-all"]) - response = json.loads(output.getvalue()) - if any(configuration['alias'] == self.alias for configuration in response['configurations']): - cli(["-q", "integrations", "github", "delete-personal", "-a", self.alias]) - - def create_integration(self): - fd, path = tempfile.mkstemp() - template = 
Template(""" - { - "accessToken": "${gh_pat}", - "alias": "${cortex_gh_alias}", - "isDefault": false - } - """) - content = template.substitute(gh_pat=os.getenv('GH_PAT'), cortex_gh_alias=self.alias) - with open(path, 'w') as f: - f.write(content) - - os.close(fd) - self.delete_personal_configuration() - cli(["-q", "integrations", "github", "add-personal", "-f", path]) - - def create_webhook(self): - EVENTS = ["push", "pull_request"] - - config = { - "url": self.webhook_url, - "secret": os.getenv('GH_WEBHOOK_SECRET'), - "content_type": "json" - } - - for hook in self.repo.get_hooks(): - if hook.config['url'] == self.webhook_url: - hook.delete() - - self.repo.create_hook("web", config, EVENTS, active=True) - - - def read_entity_template(self, file): - with open (file, 'r') as f: - template = Template(f.read()) - return textwrap.dedent(template.substitute(environment=os.getenv('CORTEX_ENV'), tenant=os.getenv('CORTEX_TENANT'), today=today(), org=self.org, repo=self.repo.name, alias=self.alias)) - - - # Wait max_attempts * sleep_interval for git commit to appear in gitops-logs - # Will wait for up to 10 minutes for commit to be processed. - # TODO: find out how we can optimize, or at least understand, the processing time. 
- def check_gitops_logs(self, capsys, sha): - found = False - #max_attempts = 120 - max_attempts = 30 - sleep_interval = 5 - for attempt in range(1, max_attempts): - response = cli_command(capsys, ["gitops-logs", "get", "-p", "0", "-z", "25"]) - if any(log['commit'] == sha for log in response['logs']): - found = True - break - else: - if attempt == max_attempts: - break - time.sleep(sleep_interval) - - return found - - def commit_cortex_entity(self, repo, content, branch, path): - contents = repo.get_contents("") - - found = False - while contents: - file_content = contents.pop(0) - if file_content.path == path: - found = True - break - if file_content.type == "dir": - contents.extend(repo.get_contents(file_content.path)) - - commit_message = "commit on " + today() + "." - - # https://github.com/PyGithub/PyGithub/issues/1787 - # Seeing some 409 errors with this. Might need a sleep here? Doesn't seem like a great solution. - # Maybe the python implementation gets confused when multiple invocations run in parallel, as happens - # with the pytests running in parallel and the API is called at the same time? - time.sleep(random.randint(1, 10)) - if found: - contents = repo.get_contents(path, ref=branch) - c = repo.update_file(path, commit_message, content, contents.sha, branch=branch) - else: - # TODO - how to create initial file in repo? 
- c = repo.create_file(path, commit_message, content, branch=branch) - - return c['commit'].sha - - -def gitops_add(capsys, template, path): - g = CortexGithub() - content = g.read_entity_template(template) - sha = g.commit_cortex_entity(g.repo, content, g.repo.default_branch, path) - return g.check_gitops_logs(capsys, sha) - -def github_setup(): - g = CortexGithub() - g.create_webhook() - g.create_integration() diff --git a/tests.orig2/feature_flag_check.py b/tests.orig2/feature_flag_check.py deleted file mode 100644 index dd66b59..0000000 --- a/tests.orig2/feature_flag_check.py +++ /dev/null @@ -1,34 +0,0 @@ -import sys -import os -import json -import traceback - -# QUESTION: should we introduce a flag that ignores whether or not the flag is set? -# In other words, do we want to ensure a certain set of account flags is set and, if not, -# cause the test to fail? - -def read_file(): - feature_flag_file = os.getenv('FEATURE_FLAG_EXPORT') - f = open(feature_flag_file) - data = json.load(f) - f.close() - return data - -def check_boolean_flag(): - data = read_file() - # This little nugget returns the calling function. The calling function - # should map to a lowercase value of the boolean-controlled account flag. 
- calling_function = traceback.extract_stack(None, 2)[0][2] - flag = calling_function.upper() - return any(f['flag'] == flag and f['value'] == True for f in data) - -def enable_cql_v2(): - return check_boolean_flag() - -def allow_team_entities_in_catalog_api(): - return check_boolean_flag() - -def enable_ui_editing(entity_type): - data = read_file() - return any(f['flag'] == "ENABLE_ENTITY_UI_EDITING" and f['value'][entity_type] == True for f in data) - #return data['ENABLE_UI_EDITING'][entity_type] == true diff --git a/tests.orig2/feature_flag_dump.py b/tests.orig2/feature_flag_dump.py deleted file mode 100644 index 1a878e9..0000000 --- a/tests.orig2/feature_flag_dump.py +++ /dev/null @@ -1,23 +0,0 @@ -import os -import requests -import sys - -file = sys.argv[1] - -h = { - "Authorization": "Bearer " + os.getenv('CORTEX_API_KEY') -} - -url = os.getenv('CORTEX_BASE_URL') + "/api/internal/v1/cortex/preferences" - -try: - r = requests.get(url, headers=h) - r.raise_for_status() -except requests.exceptions.RequestException as e: - print(e.response.text) - sys.exit(1) - -print("feature flags = " + r.text) -f = open(file, "w") -f.write(r.text) -f.close() diff --git a/tests.orig2/github_setup.py b/tests.orig2/github_setup.py deleted file mode 100644 index 1ba5a36..0000000 --- a/tests.orig2/github_setup.py +++ /dev/null @@ -1,3 +0,0 @@ -from cortex_github import * - -github_setup() diff --git a/tests.orig2/test_audit_logs.py b/tests.orig2/test_audit_logs.py deleted file mode 100644 index 7d86a75..0000000 --- a/tests.orig2/test_audit_logs.py +++ /dev/null @@ -1,5 +0,0 @@ -from tests.helpers.utils import * - -def test(): - response = json_response(["audit-logs", "get"]) - assert (len(response['logs']) > 0) diff --git a/tests.orig2/test_audit_logs_dates.py b/tests.orig2/test_audit_logs_dates.py deleted file mode 100644 index 90c4eca..0000000 --- a/tests.orig2/test_audit_logs_dates.py +++ /dev/null @@ -1,7 +0,0 @@ -from tests.helpers.utils import * - -def test(): - end_date = 
today() - start_date = yesterday() - response = json_response(["audit-logs", "get", "-s", start_date, "-e", end_date]) - assert (len(response['logs']) > 0) diff --git a/tests.orig2/test_audit_logs_end_date.py b/tests.orig2/test_audit_logs_end_date.py deleted file mode 100644 index c5ab52d..0000000 --- a/tests.orig2/test_audit_logs_end_date.py +++ /dev/null @@ -1,6 +0,0 @@ -from tests.helpers.utils import * - -def test(): - end_date = today() - response = json_response(["audit-logs", "get", "-e", end_date]) - assert (len(response['logs']) > 0) diff --git a/tests.orig2/test_audit_logs_page.py b/tests.orig2/test_audit_logs_page.py deleted file mode 100644 index c47e476..0000000 --- a/tests.orig2/test_audit_logs_page.py +++ /dev/null @@ -1,5 +0,0 @@ -from tests.helpers.utils import * - -def test(): - response = json_response(["audit-logs", "get", "-p", "0"]) - assert (len(response['logs']) > 0) diff --git a/tests.orig2/test_audit_logs_size.py b/tests.orig2/test_audit_logs_size.py deleted file mode 100644 index 6d975da..0000000 --- a/tests.orig2/test_audit_logs_size.py +++ /dev/null @@ -1,5 +0,0 @@ -from tests.helpers.utils import * - -def test(): - response = json_response(["audit-logs", "get", "-p", "0", "-z", "1"]) - assert (len(response['logs']) == 1) diff --git a/tests.orig2/test_audit_logs_start_date.py b/tests.orig2/test_audit_logs_start_date.py deleted file mode 100644 index 0b73cf2..0000000 --- a/tests.orig2/test_audit_logs_start_date.py +++ /dev/null @@ -1,6 +0,0 @@ -from tests.helpers.utils import * - -def test(): - start_date = yesterday() - response = json_response(["audit-logs", "get", "-s", start_date]) - assert (len(response['logs']) > 0) diff --git a/tests.orig2/test_catalog_archive_entity.py b/tests.orig2/test_catalog_archive_entity.py deleted file mode 100644 index e3e23be..0000000 --- a/tests.orig2/test_catalog_archive_entity.py +++ /dev/null @@ -1,10 +0,0 @@ -from common import * - -def test(capsys): - cli(["-q", "catalog", "create", "-f", 
"data/run-time/archive-entity.yaml"]) - cli(["-q", "catalog", "archive", "-t", "archive-entity"]) - # Need to clear captured system output from the above commands to clear the way for the next one. - capsys.readouterr() - - response = cli_command(capsys, ["catalog", "details", "-t", "archive-entity"]) - assert response['isArchived'] == True, "isArchived attribute should be true" diff --git a/tests.orig2/test_catalog_create_entity.py b/tests.orig2/test_catalog_create_entity.py deleted file mode 100644 index 079afb2..0000000 --- a/tests.orig2/test_catalog_create_entity.py +++ /dev/null @@ -1,9 +0,0 @@ -from common import * - -def test(capsys): - cli(["-q", "catalog", "create", "-f", "data/run-time/create-entity.yaml"]) - # Need to clear captured system output from the above commands to clear the way for the next one. - capsys.readouterr() - - response = cli_command(capsys, ["catalog", "descriptor", "-t", "create-entity"]) - assert response['info']['x-cortex-tag'] == "create-entity" diff --git a/tests.orig2/test_catalog_create_entity_viewer.py b/tests.orig2/test_catalog_create_entity_viewer.py deleted file mode 100644 index 73093a9..0000000 --- a/tests.orig2/test_catalog_create_entity_viewer.py +++ /dev/null @@ -1,11 +0,0 @@ -from common import * - -# Using a key with viewer role should be Forbidden. 
-@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) -def test(capsys): - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "catalog", "create", "-f", "data/run-time/create-entity.yaml"]) - out, err = capsys.readouterr() - - assert out == "Forbidden" - assert excinfo.value.code == 403 diff --git a/tests.orig2/test_catalog_delete_entity.py b/tests.orig2/test_catalog_delete_entity.py deleted file mode 100644 index d73128e..0000000 --- a/tests.orig2/test_catalog_delete_entity.py +++ /dev/null @@ -1,16 +0,0 @@ -from common import * - -def test(capsys): - cli_command(capsys, ["catalog", "create", "-f", "data/run-time/delete-entity.yaml"]) - response = cli_command(capsys, ["catalog", "details", "-t", "delete-entity"]) - assert response['tag'] == 'delete-entity', "Should find newly created entity" - - cli(["-q", "catalog", "delete", "-t", "delete-entity"]) - - # Since entity is deleted, cli command should exit with a Not Found, 404 error. - with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "details", "-t", "delete-entity"]) - out, err = capsys.readouterr() - - assert out == "Not Found" - assert excinfo.value.code == 404 diff --git a/tests.orig2/test_catalog_get_entity_details.py b/tests.orig2/test_catalog_get_entity_details.py deleted file mode 100644 index d4ad5f1..0000000 --- a/tests.orig2/test_catalog_get_entity_details.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "details", "-t", "backend-worker"]) - assert response['tag'] == 'backend-worker', "Entity details should be returned" diff --git a/tests.orig2/test_catalog_get_entity_details_hierarchy.py b/tests.orig2/test_catalog_get_entity_details_hierarchy.py deleted file mode 100644 index abc3fa2..0000000 --- a/tests.orig2/test_catalog_get_entity_details_hierarchy.py +++ /dev/null @@ -1,6 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "details", 
"-i", "groups", "-t", "sso-integration"]) - assert response['hierarchy']['parents'][0]['groups'][0] == 'public-api-test', "Entity groups should be in response" - assert response['hierarchy']['parents'][0]['parents'][0]['groups'][0] == 'public-api-test', "Parent groups should be in response" diff --git a/tests.orig2/test_catalog_include_links.py b/tests.orig2/test_catalog_include_links.py deleted file mode 100644 index 2d7c960..0000000 --- a/tests.orig2/test_catalog_include_links.py +++ /dev/null @@ -1,8 +0,0 @@ -from common import * - -# Too brittle if we assume only one entity has group 'include-links-test'? -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "include-links-test"]) - assert (len(response['entities'][0]['links']) == 0) - response = cli_command(capsys, ["catalog", "list", "-g", "include-links-test", "-l"]) - assert (len(response['entities'][0]['links']) > 0) diff --git a/tests.orig2/test_catalog_include_metadata.py b/tests.orig2/test_catalog_include_metadata.py deleted file mode 100644 index e1c212e..0000000 --- a/tests.orig2/test_catalog_include_metadata.py +++ /dev/null @@ -1,8 +0,0 @@ -from common import * - -# Too brittle if we assume only one entity has group 'include-metadata-test'? 
-def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "include-metadata-test"]) - assert (len(response['entities'][0]['metadata']) == 0) - response = cli_command(capsys, ["catalog", "list", "-g", "include-metadata-test", "-m"]) - assert (len(response['entities'][0]['metadata']) > 0) diff --git a/tests.orig2/test_catalog_include_nested_fields.py b/tests.orig2/test_catalog_include_nested_fields.py deleted file mode 100644 index 3130d96..0000000 --- a/tests.orig2/test_catalog_include_nested_fields.py +++ /dev/null @@ -1,8 +0,0 @@ -from common import * - -@pytest.mark.skipif(allow_team_entities_in_catalog_api() == False, reason="Account flag ALLOW_TEAM_ENTITIES_IN_CATALOG_API is not set") -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-io", "-in", "team:members"]) - list = [entity for entity in response['entities'] if entity['tag'] == "search-experience"] - assert not list == None, "found search-experience entity in response" - assert len(list[0]['members']) > 0, "response has non-empty array of members" diff --git a/tests.orig2/test_catalog_invalid_page_size.py b/tests.orig2/test_catalog_invalid_page_size.py deleted file mode 100644 index 9262a7f..0000000 --- a/tests.orig2/test_catalog_invalid_page_size.py +++ /dev/null @@ -1,9 +0,0 @@ -from common import * - -def test(capsys): - with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "list", "-z", "1001"]) - out, err = capsys.readouterr() - - assert "Page size must be set between 1 and 1000; requested value: 1005" in out, "Should get error text about invalid page parameter" - assert excinfo.value.code == 400, "Page size greater than 100 should result in a Bad Request error, http code 400" diff --git a/tests.orig2/test_catalog_list_by_github_repo.py b/tests.orig2/test_catalog_list_by_github_repo.py deleted file mode 100644 index 195b318..0000000 --- a/tests.orig2/test_catalog_list_by_github_repo.py +++ /dev/null @@ -1,5 +0,0 @@ -from 
common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-r", "my-org/my-repo"]) - assert (response['total'] == 1) diff --git a/tests.orig2/test_catalog_list_by_group_multiple.py b/tests.orig2/test_catalog_list_by_group_multiple.py deleted file mode 100644 index cc7f115..0000000 --- a/tests.orig2/test_catalog_list_by_group_multiple.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test-group-1,public-api-test-group-2"]) - assert (response['total'] == 2) diff --git a/tests.orig2/test_catalog_list_by_group_single.py b/tests.orig2/test_catalog_list_by_group_single.py deleted file mode 100644 index 399d790..0000000 --- a/tests.orig2/test_catalog_list_by_group_single.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test-group-1"]) - assert (response['total'] == 1) diff --git a/tests.orig2/test_catalog_list_by_owners_multiple.py b/tests.orig2/test_catalog_list_by_owners_multiple.py deleted file mode 100644 index 8307bd2..0000000 --- a/tests.orig2/test_catalog_list_by_owners_multiple.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-o", "payments-team,search-experience"]) - assert (response['total'] == 2) diff --git a/tests.orig2/test_catalog_list_by_owners_single.py b/tests.orig2/test_catalog_list_by_owners_single.py deleted file mode 100644 index b690010..0000000 --- a/tests.orig2/test_catalog_list_by_owners_single.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-o", "payments-team"]) - assert (response['total'] == 1) diff --git a/tests.orig2/test_catalog_list_by_types.py b/tests.orig2/test_catalog_list_by_types.py deleted file mode 100644 index f666b32..0000000 --- 
a/tests.orig2/test_catalog_list_by_types.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-t", "component"]) - assert response['total'] > 0, "Should find at least 1 entity of type 'component'" diff --git a/tests.orig2/test_catalog_list_entity_descriptors.py b/tests.orig2/test_catalog_list_entity_descriptors.py deleted file mode 100644 index cada75d..0000000 --- a/tests.orig2/test_catalog_list_entity_descriptors.py +++ /dev/null @@ -1,6 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list-descriptors"]) - list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "autocomplete"] - assert list[0]['info']['x-cortex-groups'][0] == "public-api-test" diff --git a/tests.orig2/test_catalog_list_entity_descriptors_page.py b/tests.orig2/test_catalog_list_entity_descriptors_page.py deleted file mode 100644 index 1d38a1b..0000000 --- a/tests.orig2/test_catalog_list_entity_descriptors_page.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list-descriptors", "-t", "component", "-p", "0", "-z", "1"]) - assert response['descriptors'][0]['info']['x-cortex-tag'] == "backend-worker" diff --git a/tests.orig2/test_catalog_list_entity_descriptors_page_size.py b/tests.orig2/test_catalog_list_entity_descriptors_page_size.py deleted file mode 100644 index 0c8446d..0000000 --- a/tests.orig2/test_catalog_list_entity_descriptors_page_size.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list-descriptors", "-t", "component", "-z", "1"]) - assert (len(response['descriptors']) == 1) diff --git a/tests.orig2/test_catalog_list_entity_descriptors_yaml.py b/tests.orig2/test_catalog_list_entity_descriptors_yaml.py deleted file mode 100644 index 057c176..0000000 
--- a/tests.orig2/test_catalog_list_entity_descriptors_yaml.py +++ /dev/null @@ -1,6 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list-descriptors", "-y", "-t", "component"]) - list = [descriptor for descriptor in response['descriptors'] if yaml.safe_load(descriptor)['info']['x-cortex-tag'] == "backend-worker"] - assert yaml.safe_load(list[0])['info']['x-cortex-custom-metadata']['cicd'] == "circle-ci" diff --git a/tests.orig2/test_catalog_list_include_archived.py b/tests.orig2/test_catalog_list_include_archived.py deleted file mode 100644 index b572e33..0000000 --- a/tests.orig2/test_catalog_list_include_archived.py +++ /dev/null @@ -1,8 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-z", "500"]) - assert not any(entity['tag'] == 'robot-item-sorter' for entity in response['entities']), "Should not find archived entity" - - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-a", "-z", "500"]) - assert any(entity['tag'] == 'robot-item-sorter' for entity in response['entities']), "Should find archived entity" diff --git a/tests.orig2/test_catalog_list_include_owners.py b/tests.orig2/test_catalog_list_include_owners.py deleted file mode 100644 index 5c339b3..0000000 --- a/tests.orig2/test_catalog_list_include_owners.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-io"]) - assert not(response['entities'][0]['owners']['teams'] is None), "Teams array should be returned in result" diff --git a/tests.orig2/test_catalog_list_page.py b/tests.orig2/test_catalog_list_page.py deleted file mode 100644 index 8c7d7b8..0000000 --- a/tests.orig2/test_catalog_list_page.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", 
"-p", "0"]) - assert (len(response['entities']) > 0) diff --git a/tests.orig2/test_catalog_list_page_size.py b/tests.orig2/test_catalog_list_page_size.py deleted file mode 100644 index a91113c..0000000 --- a/tests.orig2/test_catalog_list_page_size.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "list", "-g", "public-api-test", "-z", "1"]) - assert (len(response['entities']) == 1) diff --git a/tests.orig2/test_catalog_retrieve_entity_descriptor.py b/tests.orig2/test_catalog_retrieve_entity_descriptor.py deleted file mode 100644 index 6a1dab3..0000000 --- a/tests.orig2/test_catalog_retrieve_entity_descriptor.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "descriptor", "-t", "backend-worker"]) - assert response['info']['x-cortex-tag'] == "backend-worker" diff --git a/tests.orig2/test_catalog_retrieve_entity_descriptor_yaml.py b/tests.orig2/test_catalog_retrieve_entity_descriptor_yaml.py deleted file mode 100644 index 7ee95e3..0000000 --- a/tests.orig2/test_catalog_retrieve_entity_descriptor_yaml.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "descriptor", "-y", "-t", "backend-worker"], "text") - assert yaml.safe_load(response)['info']['x-cortex-tag'] == "backend-worker" diff --git a/tests.orig2/test_catalog_retrieve_entity_details.py b/tests.orig2/test_catalog_retrieve_entity_details.py deleted file mode 100644 index 0380f48..0000000 --- a/tests.orig2/test_catalog_retrieve_entity_details.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "details", "-t", "backend-worker"]) - assert response['tag'] == "backend-worker" diff --git a/tests.orig2/test_catalog_retrieve_entity_details_hierarchy_fields.py b/tests.orig2/test_catalog_retrieve_entity_details_hierarchy_fields.py deleted file 
mode 100644 index dee7ede..0000000 --- a/tests.orig2/test_catalog_retrieve_entity_details_hierarchy_fields.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["catalog", "details", "-t", "backend-worker", "-i", "groups"]) - assert response['tag'] == "backend-worker" diff --git a/tests.orig2/test_catalog_retrieve_entity_scorecard_scores.py b/tests.orig2/test_catalog_retrieve_entity_scorecard_scores.py deleted file mode 100644 index 8097d64..0000000 --- a/tests.orig2/test_catalog_retrieve_entity_scorecard_scores.py +++ /dev/null @@ -1,7 +0,0 @@ -from common import * - -@pytest.mark.skip(reason="Cannot rely on scorecard to have been evaluated. Need FR to force evaluation?") -def test(capsys): - response = cli_command(capsys, ["catalog", "scorecard-scores", "-t", "backend-worker"]) - list = [scorecard for scorecard in response if scorecard['scorecardName'] == "Public API Test Production Readiness"] - assert list[0]['score'] == 1 diff --git a/tests.orig2/test_catalog_unarchive_entity.py b/tests.orig2/test_catalog_unarchive_entity.py deleted file mode 100644 index b3a455c..0000000 --- a/tests.orig2/test_catalog_unarchive_entity.py +++ /dev/null @@ -1,13 +0,0 @@ -from common import * - -def test(capsys): - cli(["-q", "catalog", "create", "-f", "data/run-time/unarchive-entity.yaml"]) - cli(["-q", "catalog", "archive", "-t", "unarchive-entity"]) - # Need to clear captured system output from the above commands to clear the way for the next one. 
- capsys.readouterr() - - response = cli_command(capsys, ["catalog", "details", "-t", "unarchive-entity"]) - assert response['isArchived'] == True, "isArchived attribute should be true" - - response = cli_command(capsys, ["catalog", "unarchive", "-t", "unarchive-entity"]) - assert response['isArchived'] == False, "isArchived attribute should not be true" diff --git a/tests.orig2/test_config_file.py b/tests.orig2/test_config_file.py deleted file mode 100644 index cc8f75a..0000000 --- a/tests.orig2/test_config_file.py +++ /dev/null @@ -1,104 +0,0 @@ -""" -Tests for the cortex CLI config file -""" - -# These tests are all marked to run in serial order because they make modifications to the -# cortex config file and/or CORTEX_API_KEY value and would potentially impact other tests -# that are running in parallel (with poetry run pytest -n auto), so they are run separately. - -# Additionally, order is VERY IMPORTANT in this file because of the way CORTEX_API key is -# deleted, set to invalid values, etc. Moving test order could impact the overall success -# of pytest. Tread carefully here. -from cortexapps_cli.cortex import cli - -import io -import os -import pytest -import sys -from string import Template - -# Requires user input, so use monkeypatch to set it. 
-@pytest.fixture(scope="session") -def delete_cortex_api_key(): - if "CORTEX_API_KEY" in os.environ: - del os.environ['CORTEX_API_KEY'] - -@pytest.mark.serial -def test_config_file_api_key_quotes(tmp_path): - cortex_api_key = os.getenv('CORTEX_API_KEY') - f = tmp_path / "cortex_config_api_key_quotes" - template = Template(""" - [default] - api_key = "${cortex_api_key}" - """) - content = template.substitute(cortex_api_key=cortex_api_key) - print(content) - f.write_text(content) - cli(["-c", str(f), "teams", "list"]) - -@pytest.mark.serial -def test_environment_variables(capsys): - cli(["teams", "list"]) - out, err = capsys.readouterr() - #print(out) - print("ERR = " + err) - assert err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY", "Warning should be displayed by default" - - cli(["-q", "teams", "list"]) - out, err = capsys.readouterr() - assert not(err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY"), "Warning should be displayed with -q option" - -@pytest.mark.serial -def test_config_file_create(monkeypatch, tmp_path, delete_cortex_api_key): - with pytest.raises(SystemExit) as excinfo: - monkeypatch.setattr('sys.stdin', io.StringIO('Y')) - f = tmp_path / "test-config.txt" - cli(["-c", str(f), "catalog", "list"]) - -@pytest.mark.serial -def test_config_file_new(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config" - content = """ - [default] - api_key = REPLACE_WITH_YOUR_CORTEX_API_KEY - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "teams", "list"]) - out, err = capsys.readouterr() - -@pytest.mark.serial -def test_export(capsys, delete_cortex_api_key): - cli(["-t", "rich-sandbox", "backup", "export"]) - out, err = capsys.readouterr() - last_line = out.strip().split("\n")[-1] - sys.stdout.write(out + "\n\n") - sys.stdout.write(last_line + "\n\n") - assert "rich-sandbox" in out - - export_directory = last_line.replace("Contents available in 
", "") - - assert len(os.listdir(export_directory + "/catalog")) > 0, "catalog directory has files" - assert len(os.listdir(export_directory + "/scorecards")) > 0, "scorecards directory has files" - assert len(os.listdir(export_directory + "/resource-definitions")) > 0, "resource-definitions directory has files" - -@pytest.mark.serial -def test_config_file_bad_api_key(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config_bad_api_key" - content = """ - [default] - api_key = invalidApiKey - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "catalog", "list", "-t", "backend-worker"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" - -@pytest.mark.serial -def test_environment_variable_invalid_key(capsys): - with pytest.raises(SystemExit) as excinfo: - os.environ["CORTEX_API_KEY"] = "invalidKey" - cli(["teams", "list"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" diff --git a/tests.orig2/test_custom_data_create_or_update_in_bulk.py b/tests.orig2/test_custom_data_create_or_update_in_bulk.py deleted file mode 100644 index efeb8cb..0000000 --- a/tests.orig2/test_custom_data_create_or_update_in_bulk.py +++ /dev/null @@ -1,12 +0,0 @@ -from tests.helpers.utils import * - -def test(): - cli(["custom-data", "bulk", "-f", "data/run-time/custom-data-bulk.json"]) - - response = json_response(["catalog", "details", "-t", "backend-worker"]) - list = [metadata for metadata in response['metadata'] if metadata['key'] == "bulk-key-1"] - assert list[0]['value'] == "value-1" - - response = json_response( ["catalog", "details", "-t", "ach-payments-nacha"]) - list = [metadata for metadata in response['metadata'] if metadata['key'] == "bulk-key-4"] - assert list[0]['value'] == "value-4" diff --git a/tests.orig2/test_custom_data_delete.py 
b/tests.orig2/test_custom_data_delete.py deleted file mode 100644 index e338206..0000000 --- a/tests.orig2/test_custom_data_delete.py +++ /dev/null @@ -1,12 +0,0 @@ -from tests.helpers.utils import * - -def test(): - cli(["custom-data", "add", "-t", "recommendations", "-f", "data/run-time/custom-data-delete.json"]) - - response = json_response(["custom-data", "get", "-t", "recommendations", "-k", "delete-me"]) - assert response['value'] == "yes" - - cli(["custom-data", "delete", "-t", "recommendations", "-k", "delete-me"]) - - response = json_response(["catalog", "details", "-t", "recommendations"]) - assert not any(metadata['key'] == 'delete-me' for metadata in response['metadata']) diff --git a/tests.orig2/test_custom_data_list.py b/tests.orig2/test_custom_data_list.py deleted file mode 100644 index ae15abc..0000000 --- a/tests.orig2/test_custom_data_list.py +++ /dev/null @@ -1,6 +0,0 @@ -from tests.helpers.utils import * - -def test(): - response = json_response(["catalog", "details", "-t", "backend-worker"]) - list = [metadata for metadata in response['metadata'] if metadata['key'] == "cicd"] - assert list[0]['value'] == "circle-ci" diff --git a/tests.orig2/test_custom_events_list.py b/tests.orig2/test_custom_events_list.py deleted file mode 100644 index e406bd9..0000000 --- a/tests.orig2/test_custom_events_list.py +++ /dev/null @@ -1,15 +0,0 @@ -from common import * - -def test(capsys): - cli(["-q", "custom-events", "delete-all", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) - cli(["-q", "custom-events", "create", "-t", "transaction-store", "-f", "data/run-time/custom-events.json"]) - capsys.readouterr() - - response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store"]) - assert response['events'][0]['type'] == "VALIDATE_SERVICE" - - response = cli_command(capsys, ["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) - assert response['events'][0]['type'] == "VALIDATE_SERVICE" - - response = 
cli_command(capsys, ["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE", "-i", "2023-10-10T13:27:51.226"]) - assert response['events'][0]['type'] == "VALIDATE_SERVICE" diff --git a/tests.orig2/test_custom_events_uuid.py b/tests.orig2/test_custom_events_uuid.py deleted file mode 100644 index 507c1d5..0000000 --- a/tests.orig2/test_custom_events_uuid.py +++ /dev/null @@ -1,26 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["custom-events", "create", "-t", "warehousing", "-f", "data/run-time/custom-events-configure.json"]) - uuid = response['uuid'] - - cli_command(capsys, ["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) - assert response['type'] == "CONFIG_SERVICE" - - cli(["-q", "custom-events", "update-by-uuid", "-t", "warehousing", "-u", uuid, "-f", "data/run-time/custom-events.json"]) - capsys.readouterr() - - response = cli_command(capsys, ["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) - assert response['type'] == "VALIDATE_SERVICE" - - cli(["-q", "custom-events", "delete-by-uuid", "-t", "warehousing", "-u", uuid]) - - # Custom event was deleted, so verify it cannot be retrieved. 
- with pytest.raises(SystemExit) as excinfo: - cli(["-q", "custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) - out, err = capsys.readouterr() - - assert out == "Bad Request" - assert excinfo.value.code == 144 - - cli(["-q", "custom-events", "delete-all", "-t", "warehousing"]) diff --git a/tests.orig2/test_docs.py b/tests.orig2/test_docs.py deleted file mode 100644 index 495c875..0000000 --- a/tests.orig2/test_docs.py +++ /dev/null @@ -1,18 +0,0 @@ -from common import * - -def test(capsys): - cli_command(capsys, ["catalog", "create", "-f", "data/run-time/docs-entity.yaml"]) - - cli_command(capsys, ["docs", "update", "-t", "docs-entity", "-f", "data/run-time/docs.yaml"]) - - response = cli_command(capsys, ["docs", "get", "-t", "docs-entity"]) - spec = yaml.safe_load(response['spec']) - assert spec['info']['title'] == "Simple API overview", "API spec should have been retrieved" - - cli_command(capsys, ["-q", "docs", "delete", "-t", "docs-entity"], "none") - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "docs", "get", "-t", "docs-entity"]) - out, err = capsys.readouterr() - - assert out == "Not Found" - assert excinfo.value.code == 404 diff --git a/tests.orig2/test_entity_types.py b/tests.orig2/test_entity_types.py deleted file mode 100644 index 7073cd5..0000000 --- a/tests.orig2/test_entity_types.py +++ /dev/null @@ -1,26 +0,0 @@ -from common import * -# Entity Types were previously known as resource definitions. That's why the CLI -# command is 'resource-definitions'. CLI will likely be updated in the future to -# deprecate this. 
- -def test(capsys): - entity_type = "public-api-type-empty-schema" - response = cli_command(capsys, ["resource-definitions", "list"]) - - if any(entity['type'] == entity_type for entity in response['definitions']): - cli(["-q", "catalog", "delete-by-type", "-t", entity_type]) - cli(["-q", "resource-definitions", "delete", "-t", entity_type]) - - cli_command(capsys, ["resource-definitions", "create", "-f", "data/run-time/create-entity-type-empty-schema.json"]) - - response = cli_command(capsys, ["resource-definitions", "list"]) - assert any(entity['type'] == entity_type for entity in response['definitions']), "Entity type should be returned in list" - - response = cli_command(capsys, ["resource-definitions", "get", "-t", entity_type]) - assert response['type'] == entity_type, "Type of returned entity type should be " + entity_type + "." - - cli_command(capsys, ["resource-definitions", "update", "-t", entity_type, "-f", "data/run-time/update-entity-type-empty-schema.json"]) - - response = cli_command(capsys, ["resource-definitions", "get", "-t", entity_type]) - assert response['name'] == "Public API Type With Empty Schema -- Update", "Name should be updated for entity type" - cli(["-q", "catalog", "delete-by-type", "-t", entity_type]) diff --git a/tests.orig2/test_github.py b/tests.orig2/test_github.py deleted file mode 100644 index 1d9a59d..0000000 --- a/tests.orig2/test_github.py +++ /dev/null @@ -1,19 +0,0 @@ -from common import * -from cortex_github import * - -# I don't think these tests can reliably run in parallel. 
Can result in PyGitHub reporting errors like this: -# FAILED tests/test_github_cortex_yaml_in_root.py::test -# github.GithubException.GithubException: 409 {"message": "is at ef660f9 but expected 418d7ec", "documentation_url": -@pytest.mark.skipif(enable_ui_editing("SERVICE") == True, reason="Account flag ENABLE_UI_EDITING for SERVICE is true.") -@pytest.mark.skipif(os.getenv('CORTEX_ENV') != "staging" or os.getenv('CORTEX_TENANT') != "jeff-sandbox", reason="To prevent git commit clashes, the test for cortex.yaml in the root will only run for main API test in staging") -def test_github_cortex_yaml_in_root(capsys): - assert gitops_add(capsys, "data/run-time/gitops.tmpl", "cortex.yaml") == True, "failed to find commit in gitops-logs" - - response = cli_command(capsys, ["catalog", "details", "-t", "gitops-entity"]) - assert response['tag'] == "gitops-entity", "Entity details can be retrieved for gitops entity" - -@pytest.mark.skipif(enable_ui_editing("SERVICE") == True, reason="Account flag ENABLE_UI_EDITING for SERVICE is true.") -def test_github_entity_in_dot_cortex(capsys): - assert gitops_add(capsys, "data/run-time/gitops-catalog.tmpl", ".cortex/catalog/" + os.getenv('CORTEX_ENV') + "-" + os.getenv('CORTEX_TENANT') + "-gitops-catalog.yaml") == True, "failed to find commit in gitops-logs" - response = cli_command(capsys, ["catalog", "details", "-t", os.getenv('CORTEX_ENV') + "-" + os.getenv('CORTEX_TENANT') + "-gitops-catalog"]) - assert response['tag'] == os.getenv('CORTEX_ENV') + "-" + os.getenv('CORTEX_TENANT') + "-gitops-catalog" diff --git a/tests.orig2/test_groups.py b/tests.orig2/test_groups.py deleted file mode 100644 index 3021f74..0000000 --- a/tests.orig2/test_groups.py +++ /dev/null @@ -1,14 +0,0 @@ -from common import * - -def test(capsys): - cli_command(capsys, ["catalog", "create", "-f", "data/run-time/groups-entity.yaml"]) - - cli_command(capsys, ["groups", "add", "-t", "groups-entity", "-f", "data/run-time/groups.json"]) - - response = 
cli_command(capsys, ["groups", "get", "-t", "groups-entity"]) - assert any(group['tag'] == "group1" for group in response['groups']), "Entity should have 'group1' as a group" - - cli(["-q", "groups", "delete", "-t", "groups-entity", "-f", "data/run-time/groups.json"]) - - response = cli_command(capsys, ["groups", "get", "-t", "groups-entity"]) - assert not any(group['tag'] == "group1" for group in response['groups']), "Entity should NOT have 'group1' as a group" diff --git a/tests.orig2/test_ip_allowlist.py b/tests.orig2/test_ip_allowlist.py deleted file mode 100644 index 039616c..0000000 --- a/tests.orig2/test_ip_allowlist.py +++ /dev/null @@ -1,49 +0,0 @@ -from common import * - -def _ip_allowlist_input(tmp_path, ip_address, description): - f = tmp_path / "test_ip_allowlist_input.json" - template = Template(""" - { - "entries": [ - { - "address": "${ip_address}", - "description": "string" - }, - { - "address": "127.0.0.1", - "description": "${description}" - } - ] - } - """) - content = template.substitute(ip_address=ip_address, description=description) - f.write_text(content) - return f - -def test(tmp_path, capsys): - ip_address = requests.get("https://ip.me").text.strip() - - description = "initial description" - f = _ip_allowlist_input(tmp_path, ip_address, description) - response = cli_command(capsys, ["ip-allowlist", "validate", "-f", str(f)]) - - # Initial replace - cli_command(capsys, ["ip-allowlist", "replace", "-f", str(f)]) - response = cli_command(capsys, ["ip-allowlist", "get"]) - assert any(entry['description'] == description for entry in response['entries']), "Allowlist entry should have expected description" - - # Updated replace - updated_description = "updated description" - f = _ip_allowlist_input(tmp_path, ip_address, updated_description) - cli_command(capsys, ["ip-allowlist", "replace", "-f", str(f)]) - response = cli_command(capsys, ["ip-allowlist", "get"]) - assert any(entry['description'] == updated_description for entry in 
response['entries']), "Allowlist entry should be updated" - - cli_command(capsys, ["ip-allowlist", "replace", "-f", "data/run-time/ip_allowlist_empty.json"]) - - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "catalog", "ip-allowlist", "-f", "data/run-time/ip_allowlist_invalid.json"]) - out, err = capsys.readouterr() - - assert out == "Not Found" - assert excinfo.value.code == 404 diff --git a/tests.orig2/test_packages.py b/tests.orig2/test_packages.py deleted file mode 100644 index adbc8d4..0000000 --- a/tests.orig2/test_packages.py +++ /dev/null @@ -1,28 +0,0 @@ -from common import * - -# When trying to put python and node tests in separate tests running in parallel, got 409 Conflict HTTP errors, -# even when using different entity tag. -def test(capsys): - cli_command(capsys, ["packages", "go", "upload", "-t", "sso-integration", "-f", "data/run-time/packages_go.sum"]) - packages(capsys, "go", "GO", "3.3.0", "github.com/gofrs/uuid", "sso-integration") - - cli_command(capsys, ["packages", "python", "upload-pipfile", "-t", "sso-integration", "-f", "data/run-time/packages_python_pipfile.lock"]) - packages(capsys, "python", "PYTHON", "2022.12.7", "certifi", "sso-integration") - - cli_command(capsys, ["packages", "python", "upload-requirements", "-t", "sso-integration", "-f", "data/run-time/packages_python_requirements.txt"]) - packages(capsys, "python", "PYTHON", "1.0.6", "contourpy", "sso-integration") - - cli_command(capsys, ["packages", "node", "upload-package", "-t", "sso-integration", "-f", "data/run-time/packages_node_package.json"]) - packages(capsys, "node", "NODE", "^4.1.11", "clean-css", "sso-integration") - - cli_command(capsys, ["packages", "node", "upload-package-lock", "-t", "sso-integration", "-f", "data/run-time/packages_node_package_lock.json"]) - packages(capsys, "node", "NODE", "4.2.6", "@angular/common", "sso-integration") - - cli_command(capsys, ["packages", "node", "upload-yarn-lock", "-t", "sso-integration", "-f", 
"data/run-time/packages_node_yarn.lock"]) - packages(capsys, "node", "NODE", "6.16.5", "@types/babylon", "sso-integration") - - cli_command(capsys, ["packages", "nuget", "upload-packages-lock", "-t", "sso-integration", "-f", "data/run-time/packages_nuget_packages_lock.json"]) - packages(capsys, "nuget", "NUGET", "1.0.0", "Microsoft.NETFramework.ReferenceAssemblies", "sso-integration") - - cli_command(capsys, ["packages", "nuget", "upload-csproj", "-t", "sso-integration", "-f", "data/run-time/packages_nuget.csproj"]) - packages(capsys, "nuget", "NUGET", "7.1.1", "CsvHelper", "sso-integration") diff --git a/tests.orig2/test_packages_java.py b/tests.orig2/test_packages_java.py deleted file mode 100644 index fb6aa21..0000000 --- a/tests.orig2/test_packages_java.py +++ /dev/null @@ -1,6 +0,0 @@ -from common import * - -def test(capsys): - cli_command(capsys, ["packages", "java", "upload-single", "-t", "sso-integration", "-f", "data/run-time/packages_java_single.json"]) - cli_command(capsys, ["packages", "java", "upload-multiple", "-t", "sso-integration", "-f", "data/run-time/packages_java_multiple.json"]) - packages(capsys, "java", "JAVA", "3.3.3", "io.cortex.teams", "sso-integration") diff --git a/tests.orig2/test_plugins.py b/tests.orig2/test_plugins.py deleted file mode 100644 index 3c6a443..0000000 --- a/tests.orig2/test_plugins.py +++ /dev/null @@ -1,23 +0,0 @@ -from common import * - -def test(capsys): - pluginTag = "public-api-test-plugin" - - response = cli_command(capsys, ["plugins", "get"]) - if any(plugin['tag'] == pluginTag for plugin in response['plugins']): - cli(["plugins", "delete", "-t", pluginTag]) - - cli_command(capsys, ["plugins", "create", "-f", "data/run-time/test_plugins.json"]) - - response = cli_command(capsys, ["plugins", "get"]) - assert any(plugin['tag'] == pluginTag for plugin in response['plugins']), "Plugin " + plugin + " returned in get" - - cli_command(capsys, ["plugins", "update", "-t", pluginTag, "-f", 
"data/run-time//test_plugins_update.json"]) - - response = cli_command(capsys, ["plugins", "get-by-tag", "-t", pluginTag]) - assert response['tag'] == pluginTag, "Plugin " + plugin + " returned by get-by-tag" - assert response['description'] == "Just testing plugin updates", "Plugin " + plugin + " description updated" - - cli(["-q", "plugins", "delete", "-t", pluginTag]) - response = cli_command(capsys, ["plugins", "get"]) - assert not any(plugin['tag'] == pluginTag for plugin in response['plugins']), "Plugin " + plugin + " returned in get" diff --git a/tests.orig2/test_plugins_invalid.py b/tests.orig2/test_plugins_invalid.py deleted file mode 100644 index 0c6fe55..0000000 --- a/tests.orig2/test_plugins_invalid.py +++ /dev/null @@ -1,18 +0,0 @@ -from common import * - -# Using a key with viewer role should be Forbidden. -@mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) -def test(capsys): - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "plugins", "create", "-f", "data/run-time/test_plugins_manager.json"]) - out, err = capsys.readouterr() - - assert out == "Forbidden", "Attempt to create plugin as a VIEWER with minimumRole defined as MANAGER should be Forbidden" - assert excinfo.value.code == 403, "VIEWER role cannot create plugin with minimumRole defined as MANAGER" - - with pytest.raises(SystemExit) as excinfo: - cli(["-q", "plugins", "create", "-f", "data/run-time/test_plugins_invalid_role.json"]) - out, err = capsys.readouterr() - - assert out == "Bad Request", "Invalid minimumRole results in Bad Request" - assert excinfo.value.code == 400, "Invalid minimumRole should result in 400 return code" diff --git a/tests.orig2/test_scorecards.py b/tests.orig2/test_scorecards.py deleted file mode 100644 index 2c6bc86..0000000 --- a/tests.orig2/test_scorecards.py +++ /dev/null @@ -1,30 +0,0 @@ -from common import * - -@pytest.mark.skipif(enable_cql_v2() == False, reason="Account flag ENABLE_CQL_V2 is not not set.") -def 
test_scorecards(capsys): - scorecardTag = "public-api-test-scorecard" - entityTag = "user-profile-metadata-service" - - response = cli_command(capsys, ["scorecards", "create", "-f", "data/run-time/scorecard.yaml"]) - assert response['scorecard']['tag'] == scorecardTag, "Scorecard with tag public-api-test-scorecard should be created" - - response = cli_command(capsys, ["scorecards", "list"]) - assert any(scorecard['tag'] == scorecardTag for scorecard in response['scorecards']), scorecard + " should be in list of scorecards" - - response = cli_command(capsys, ["scorecards", "shield", "-s", scorecardTag, "-t", entityTag]) - # Dear future (hopefully smarter) self, feel free to enhance the regex to search for the correct brackets and parentheses in the regular expression. - assert re.search(".*Public API Test Scorecard.*https://img.shields.io.*", response['value']), "Value includes scorecard name and shields URL" - - response = cli_command(capsys, ["scorecards", "get", "-t", scorecardTag]) - assert response['scorecard']['tag'] == scorecardTag, "Can retrieve tag of scorecard" - assert response['scorecard']['levels'][0]['level']['name'] == 'Gold', "Can retrieve level name defined in scorecard" - - response = cli_command(capsys, ["scorecards", "descriptor", "-t", scorecardTag], "text") - assert yaml.safe_load(response)['tag'] == scorecardTag, "Can get tag from YAML descriptor" - -# cli(["scorecards", "next-steps", "-t", "public-api-test-scorecard", "-e", "user-profile-metadata-service"]) - -# # Not sure if we can run this cli right away. Newly-created Scorecard might not be evaluated yet. 
-# cli(["scorecards", "scores", "-t", "public-api-test-scorecard", "-e", "user-profile-metadata-service"]) - -# cli(["scorecards", "scores", "-t", "public-api-test-scorecard"]) diff --git a/tests.orig2/test_scorecards_drafts.py b/tests.orig2/test_scorecards_drafts.py deleted file mode 100644 index ae7c293..0000000 --- a/tests.orig2/test_scorecards_drafts.py +++ /dev/null @@ -1,11 +0,0 @@ -from common import * - -@pytest.mark.skipif(enable_cql_v2() == False, reason="Account flag ENABLE_CQL_V2 is not not set.") -def test(capsys): - cli_command(capsys, ["scorecards", "create", "-f", "data/run-time/scorecard_drafts.yaml"]) - - response = cli_command(capsys, ["scorecards", "list", "-s"]) - assert any(scorecard['tag'] == 'public-api-test-draft-scorecard' for scorecard in response['scorecards']), "Draft scorecards are returned with showDrafts query parameter" - - response = cli_command(capsys, ["scorecards", "list"]) - assert not any(scorecard['tag'] == 'public-api-test-draft-scorecard' for scorecard in response['scorecards']), "Draft scorecards are not returned without showDrafts query parameter" diff --git a/tests.orig2/test_teams.py b/tests.orig2/test_teams.py deleted file mode 100644 index 385c68b..0000000 --- a/tests.orig2/test_teams.py +++ /dev/null @@ -1,5 +0,0 @@ -from common import * - -def test(capsys): - response = cli_command(capsys, ["teams", "list"]) - assert any(team['teamTag'] == 'payments-team' for team in response['teams']) diff --git a/tests.orig/test-groups.json b/tests/test-groups.json similarity index 100% rename from tests.orig/test-groups.json rename to tests/test-groups.json diff --git a/tests/test_catalog_create_entity.py b/tests/test_catalog_create_entity.py index 1858481..a7a1bb8 100644 --- a/tests/test_catalog_create_entity.py +++ b/tests/test_catalog_create_entity.py @@ -1,8 +1,7 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/create-entity.yaml"]) + cli(["catalog", "create", "-f", 
"data/run-time/test-service.yaml"]) - response = cli(["catalog", "descriptor", "-t", "create-entity"]) - print(str(response)) - assert response['info']['x-cortex-tag'] == "create-entity" + response = cli(["catalog", "descriptor", "-t", "test-service"]) + assert response['info']['x-cortex-tag'] == "test-service" diff --git a/tests/test_catalog_get_entity_details.py b/tests/test_catalog_get_entity_details.py index c1563ab..534b226 100644 --- a/tests/test_catalog_get_entity_details.py +++ b/tests/test_catalog_get_entity_details.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli( ["catalog", "details", "-t", "backend-worker"]) - assert response['tag'] == 'backend-worker', "Entity details should be returned" + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli( ["catalog", "details", "-t", "test-service"]) + assert response['tag'] == 'test-service', "Entity details should be returned" diff --git a/tests/test_catalog_get_entity_details_hierarchy.py b/tests/test_catalog_get_entity_details_hierarchy.py index 4006392..4fda28e 100644 --- a/tests/test_catalog_get_entity_details_hierarchy.py +++ b/tests/test_catalog_get_entity_details_hierarchy.py @@ -1,6 +1,9 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "details", "-i", "groups", "-t", "sso-integration"]) - assert response['hierarchy']['parents'][0]['groups'][0] == 'public-api-test', "Entity groups should be in response" - assert response['hierarchy']['parents'][0]['parents'][0]['groups'][0] == 'public-api-test', "Parent groups should be in response" + cli(["catalog", "create", "-f", "data/run-time/test-domain-parent.yaml"]) + cli(["catalog", "create", "-f", "data/run-time/test-domain-child.yaml"]) + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + response = cli(["catalog", "details", "-i", "groups", "-t", "test-service"]) + assert response['hierarchy']['parents'][0]['groups'][0] == 'cli-test', "Entity 
groups should be in response" + assert response['hierarchy']['parents'][0]['parents'][0]['groups'][0] == 'cli-test', "Parent groups should be in response" diff --git a/tests/test_catalog_include_links.py b/tests/test_catalog_include_links.py index 2de92da..c998c0c 100644 --- a/tests/test_catalog_include_links.py +++ b/tests/test_catalog_include_links.py @@ -1,7 +1,8 @@ from tests.helpers.utils import * -# Too brittle if we assume only one entity has group 'include-links-test'? def test(): + response = cli(["catalog", "create", "-f", "data/run-time/test-service-links.yaml"]) + response = cli(["catalog", "list", "-g", "include-links-test"]) assert (len(response['entities'][0]['links']) == 0) response = cli(["catalog", "list", "-g", "include-links-test", "-l"]) diff --git a/tests/test_catalog_include_metadata.py b/tests/test_catalog_include_metadata.py index aaab24c..6bbac08 100644 --- a/tests/test_catalog_include_metadata.py +++ b/tests/test_catalog_include_metadata.py @@ -1,8 +1,10 @@ from tests.helpers.utils import * -# Too brittle if we assume only one entity has group 'include-metadata-test'? 
def test(): + cli(["catalog", "create", "-f", "data/run-time/test-service-metadata.yaml"]) + response = cli(["catalog", "list", "-g", "include-metadata-test"]) assert (len(response['entities'][0]['metadata']) == 0) + response = cli(["catalog", "list", "-g", "include-metadata-test", "-m"]) assert (len(response['entities'][0]['metadata']) > 0) diff --git a/tests/test_catalog_include_nested_fields.py b/tests/test_catalog_include_nested_fields.py index 7146cd2..71349ef 100644 --- a/tests/test_catalog_include_nested_fields.py +++ b/tests/test_catalog_include_nested_fields.py @@ -1,8 +1,9 @@ from tests.helpers.utils import * -#@pytest.mark.skipif(allow_team_entities_in_catalog_api() == False, reason="Account flag ALLOW_TEAM_ENTITIES_IN_CATALOG_API is not set") def test(): - response = cli(["catalog", "list", "-g", "public-api-test", "-io", "-in", "team:members"]) - list = [entity for entity in response['entities'] if entity['tag'] == "search-experience"] - assert not list == None, "found search-experience entity in response" + response = cli(["catalog", "create", "-f", "data/run-time/test-team-1.yaml"]) + + response = cli(["catalog", "list", "-g", "cli-test", "-io", "-in", "team:members"]) + list = [entity for entity in response['entities'] if entity['tag'] == "test-team-1"] + assert not list == None, "found an entity in response" assert len(list[0]['members']) > 0, "response has non-empty array of members" diff --git a/tests/test_catalog_list_by_group_multiple.py b/tests/test_catalog_list_by_group_multiple.py index de32aaf..192a609 100644 --- a/tests/test_catalog_list_by_group_multiple.py +++ b/tests/test_catalog_list_by_group_multiple.py @@ -1,5 +1,8 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list", "-g", "public-api-test-group-1,public-api-test-group-2"]) + cli(["catalog", "create", "-f", "data/run-time/test-service-group-1.yaml"]) + cli(["catalog", "create", "-f", "data/run-time/test-service-group-2.yaml"]) + + response = 
cli(["catalog", "list", "-g", "cli-test-group-1,cli-test-group-2"]) assert (response['total'] == 2) diff --git a/tests/test_catalog_list_by_group_single.py b/tests/test_catalog_list_by_group_single.py index 9b58dc1..811bdcd 100644 --- a/tests/test_catalog_list_by_group_single.py +++ b/tests/test_catalog_list_by_group_single.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list", "-g", "public-api-test-group-1"]) + cli(["catalog", "create", "-f", "data/run-time/test-service-group-1.yaml"]) + + response = cli(["catalog", "list", "-g", "cli-test-group-1"]) assert (response['total'] == 1) diff --git a/tests/test_catalog_list_by_owners_multiple.py b/tests/test_catalog_list_by_owners_multiple.py index c1cf512..8add790 100644 --- a/tests/test_catalog_list_by_owners_multiple.py +++ b/tests/test_catalog_list_by_owners_multiple.py @@ -1,5 +1,10 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list", "-o", "payments-team,search-experience"]) + cli(["catalog", "create", "-f", "data/run-time/test-team-1.yaml"]) + cli(["catalog", "create", "-f", "data/run-time/test-team-2.yaml"]) + cli(["catalog", "create", "-f", "data/run-time/test-service-test-team-1.yaml"]) + cli(["catalog", "create", "-f", "data/run-time/test-service-test-team-2.yaml"]) + + response = cli(["catalog", "list", "-o", "test-team-1,test-team-2"]) assert (response['total'] == 2) diff --git a/tests/test_catalog_list_by_owners_single.py b/tests/test_catalog_list_by_owners_single.py index e0e0a30..6adf2b1 100644 --- a/tests/test_catalog_list_by_owners_single.py +++ b/tests/test_catalog_list_by_owners_single.py @@ -1,5 +1,8 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list", "-o", "payments-team"]) + cli(["catalog", "create", "-f", "data/run-time/test-team-1.yaml"]) + cli(["catalog", "create", "-f", "data/run-time/test-service-test-team-1.yaml"]) + + response = cli(["catalog", "list", "-o", "test-team-1"]) 
assert (response['total'] == 1) diff --git a/tests/test_catalog_list_by_types.py b/tests/test_catalog_list_by_types.py index 5053dfc..c7bed2f 100644 --- a/tests/test_catalog_list_by_types.py +++ b/tests/test_catalog_list_by_types.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list", "-g", "public-api-test", "-t", "component"]) - assert response['total'] > 0, "Should find at least 1 entity of type 'component'" + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "list", "-g", "cli-test", "-t", "service"]) + assert response['total'] > 0, "Should find at least 1 entity of type 'service'" diff --git a/tests/test_catalog_list_entity_descriptors.py b/tests/test_catalog_list_entity_descriptors.py index 1cd5fcf..7935690 100644 --- a/tests/test_catalog_list_entity_descriptors.py +++ b/tests/test_catalog_list_entity_descriptors.py @@ -1,6 +1,9 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list-descriptors", "-t", "api"]) - list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "api-australia"] - assert list[0]['info']['x-cortex-groups'][0] == "public-api-test" + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "list-descriptors", "-t", "service"]) + + list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "test-service"] + assert list[0]['info']['x-cortex-groups'][0] == "cli-test" diff --git a/tests/test_catalog_list_entity_descriptors_page.py b/tests/test_catalog_list_entity_descriptors_page.py index 1b505c6..3f38775 100644 --- a/tests/test_catalog_list_entity_descriptors_page.py +++ b/tests/test_catalog_list_entity_descriptors_page.py @@ -1,5 +1,9 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list-descriptors", "-t", "component", "-p", "0", "-z", "1"]) - assert 
response['descriptors'][0]['info']['x-cortex-tag'] == "backend-worker" + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + response = cli(["catalog", "list-descriptors", "-t", "service", "-p", "0", "-z", "1"]) + + # YAML descriptor has single quotes, so cannot read it as valid JSON. First convert to double quotes. + json_data = json.loads(str(response).replace("'", "\"")) + assert len(json_data['descriptors']) == 1, "exactly one descriptor is returned" diff --git a/tests/test_catalog_list_entity_descriptors_page_size.py b/tests/test_catalog_list_entity_descriptors_page_size.py index a0ffd67..8a62fea 100644 --- a/tests/test_catalog_list_entity_descriptors_page_size.py +++ b/tests/test_catalog_list_entity_descriptors_page_size.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list-descriptors", "-t", "component", "-p", "0", "-z", "1"]) + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "list-descriptors", "-t", "service", "-p", "0", "-z", "1"]) assert (len(response['descriptors']) == 1) diff --git a/tests/test_catalog_list_entity_descriptors_yaml.py b/tests/test_catalog_list_entity_descriptors_yaml.py index 2e18279..1644e6d 100644 --- a/tests/test_catalog_list_entity_descriptors_yaml.py +++ b/tests/test_catalog_list_entity_descriptors_yaml.py @@ -1,7 +1,8 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list-descriptors", "-y", "--types", "component"]) - list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "backend-worker"] - print("list = " + str(list)) + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "list-descriptors", "-y", "--types", "service"]) + list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "test-service"] assert list[0]['info']['x-cortex-custom-metadata']['cicd'] 
== "circle-ci" diff --git a/tests/test_catalog_list_include_archived.py b/tests/test_catalog_list_include_archived.py index 0577b87..0582ad0 100644 --- a/tests/test_catalog_list_include_archived.py +++ b/tests/test_catalog_list_include_archived.py @@ -1,8 +1,11 @@ from tests.helpers.utils import * def test(capsys): - response = cli(["catalog", "list", "-g", "public-api-test", "-z", "500"]) - assert not any(entity['tag'] == 'robot-item-sorter' for entity in response['entities']), "Should not find archived entity" + response = cli(["catalog", "create", "-f", "data/run-time/archive-entity.yaml"]) + response = cli(["catalog", "archive", "-t", "archive-entity"]) - response = cli(["catalog", "list", "-g", "public-api-test", "-a", "-z", "500"]) - assert any(entity['tag'] == 'robot-item-sorter' for entity in response['entities']), "Should find archived entity" + response = cli(["catalog", "list", "-g", "cli-test", "-z", "500"]) + assert not any(entity['tag'] == 'archive-entity' for entity in response['entities']), "Should not find archived entity" + + response = cli(["catalog", "list", "-g", "cli-test", "-a", "-z", "500"]) + assert any(entity['tag'] == 'archive-entity' for entity in response['entities']), "Should find archived entity" diff --git a/tests/test_catalog_retrieve_entity_descriptor.py b/tests/test_catalog_retrieve_entity_descriptor.py index 093e7d4..a7a1bb8 100644 --- a/tests/test_catalog_retrieve_entity_descriptor.py +++ b/tests/test_catalog_retrieve_entity_descriptor.py @@ -1,6 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "descriptor", "-t", "backend-worker"]) - print(response) - assert response['info']['x-cortex-tag'] == "backend-worker" + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "descriptor", "-t", "test-service"]) + assert response['info']['x-cortex-tag'] == "test-service" diff --git a/tests/test_catalog_retrieve_entity_descriptor_yaml.py 
b/tests/test_catalog_retrieve_entity_descriptor_yaml.py index 805829e..3a304fb 100644 --- a/tests/test_catalog_retrieve_entity_descriptor_yaml.py +++ b/tests/test_catalog_retrieve_entity_descriptor_yaml.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "descriptor", "-y", "-t", "backend-worker"], ReturnType.STDOUT) - assert yaml.safe_load(response)['info']['x-cortex-tag'] == "backend-worker" + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "descriptor", "-y", "-t", "test-service"], ReturnType.STDOUT) + assert yaml.safe_load(response)['info']['x-cortex-tag'] == "test-service" diff --git a/tests/test_catalog_retrieve_entity_details.py b/tests/test_catalog_retrieve_entity_details.py index 03d5460..6e78de8 100644 --- a/tests/test_catalog_retrieve_entity_details.py +++ b/tests/test_catalog_retrieve_entity_details.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "details", "-t", "backend-worker"]) - assert response['tag'] == "backend-worker" + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "details", "-t", "test-service"]) + assert response['tag'] == "test-service" diff --git a/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py b/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py index ccad405..9670523 100644 --- a/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py +++ b/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "details", "-t", "backend-worker", "-i", "groups"]) - assert response['tag'] == "backend-worker" + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "details", "-t", "test-service", "-i", "groups"]) + assert response['tag'] == "test-service" diff --git a/tests/conftest.py 
b/tests/test_conftest.py similarity index 100% rename from tests/conftest.py rename to tests/test_conftest.py diff --git a/tests/test_custom_data_create_or_update_in_bulk.py b/tests/test_custom_data_create_or_update_in_bulk.py index ff89a87..5867aa4 100644 --- a/tests/test_custom_data_create_or_update_in_bulk.py +++ b/tests/test_custom_data_create_or_update_in_bulk.py @@ -3,10 +3,10 @@ def test(): cli(["custom-data", "bulk", "-f", "data/run-time/custom-data-bulk.json"]) - result = cli(["catalog", "details", "-t", "backend-worker"]) + result = cli(["catalog", "details", "-t", "test-service-caller"]) list = [metadata for metadata in result['metadata'] if metadata['key'] == "bulk-key-1"] assert list[0]['value'] == "value-1" - result = cli( ["catalog", "details", "-t", "ach-payments-nacha"]) + result = cli( ["catalog", "details", "-t", "test-service-callee"]) list = [metadata for metadata in result['metadata'] if metadata['key'] == "bulk-key-4"] assert list[0]['value'] == "value-4" diff --git a/tests/test_custom_data_delete.py b/tests/test_custom_data_delete.py index 07db6ef..12bc7c9 100644 --- a/tests/test_custom_data_delete.py +++ b/tests/test_custom_data_delete.py @@ -1,12 +1,14 @@ from tests.helpers.utils import * def test(): - cli(["custom-data", "add", "-t", "recommendations", "-f", "data/run-time/custom-data-delete.json"]) + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - result = cli(["custom-data", "get", "-t", "recommendations", "-k", "delete-me"]) + cli(["custom-data", "add", "-t", "test-service", "-f", "data/run-time/custom-data-delete.json"]) + + result = cli(["custom-data", "get", "-t", "test-service", "-k", "delete-me"]) assert result['value'] == "yes" - cli(["custom-data", "delete", "-t", "recommendations", "-k", "delete-me"]) + cli(["custom-data", "delete", "-t", "test-service", "-k", "delete-me"]) - result = cli(["catalog", "details", "-t", "recommendations"]) + result = cli(["catalog", "details", "-t", "test-service"]) assert 
not any(metadata['key'] == 'delete-me' for metadata in result['metadata']) diff --git a/tests/test_custom_data_list.py b/tests/test_custom_data_list.py index 0bfa5c1..01f97e3 100644 --- a/tests/test_custom_data_list.py +++ b/tests/test_custom_data_list.py @@ -1,6 +1,8 @@ from tests.helpers.utils import * def test(): - result = cli(["catalog", "details", "-t", "backend-worker"]) + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + result = cli(["catalog", "details", "-t", "test-service"]) list = [metadata for metadata in result['metadata'] if metadata['key'] == "cicd"] assert list[0]['value'] == "circle-ci" diff --git a/tests/test_custom_events_list.py b/tests/test_custom_events_list.py index 7ccbb0a..fff7c50 100644 --- a/tests/test_custom_events_list.py +++ b/tests/test_custom_events_list.py @@ -1,14 +1,16 @@ from tests.helpers.utils import * def test(): - cli(["custom-events", "delete-all", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) - cli(["custom-events", "create", "-t", "transaction-store", "-f", "data/run-time/custom-events.json"]) - result = cli(["custom-events", "list", "-t", "transaction-store"]) + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + cli(["custom-events", "delete-all", "-t", "test-service", "-y", "VALIDATE_SERVICE"]) + cli(["custom-events", "create", "-t", "test-service", "-f", "data/run-time/custom-events.json"]) + + result = cli(["custom-events", "list", "-t", "test-service"]) assert result['events'][0]['type'] == "VALIDATE_SERVICE" - result = cli(["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE"]) + result = cli(["custom-events", "list", "-t", "test-service", "-y", "VALIDATE_SERVICE"]) assert result['events'][0]['type'] == "VALIDATE_SERVICE" - result = cli(["custom-events", "list", "-t", "transaction-store", "-y", "VALIDATE_SERVICE", "-ts", "2023-10-10T13:27:51"]) + result = cli(["custom-events", "list", "-t", "test-service", "-y", "VALIDATE_SERVICE", "-ts", 
"2023-10-10T13:27:51"]) assert result['events'][0]['type'] == "VALIDATE_SERVICE" diff --git a/tests/test_custom_events_uuid.py b/tests/test_custom_events_uuid.py index 1a49b23..167eec6 100644 --- a/tests/test_custom_events_uuid.py +++ b/tests/test_custom_events_uuid.py @@ -1,24 +1,26 @@ from tests.helpers.utils import * def test(): - result = cli(["custom-events", "create", "-t", "warehousing", "-f", "data/run-time/custom-events-configure.json"]) + result = cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + result = cli(["custom-events", "create", "-t", "test-service", "-f", "data/run-time/custom-events-configure.json"]) uuid = result['uuid'] - result = cli(["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) + result = cli(["custom-events", "get-by-uuid", "-t", "test-service", "-u", uuid]) assert result['type'] == "CONFIG_SERVICE" - cli(["custom-events", "update-by-uuid", "-t", "warehousing", "-u", uuid, "-f", "data/run-time/custom-events.json"]) + cli(["custom-events", "update-by-uuid", "-t", "test-service", "-u", uuid, "-f", "data/run-time/custom-events.json"]) - result = cli(["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid]) + result = cli(["custom-events", "get-by-uuid", "-t", "test-service", "-u", uuid]) assert result['type'] == "VALIDATE_SERVICE" - cli(["custom-events", "delete-by-uuid", "-t", "warehousing", "-u", uuid]) + cli(["custom-events", "delete-by-uuid", "-t", "test-service", "-u", uuid]) # Custom event was deleted, so verify it cannot be retrieved. 
# with pytest.raises(SystemExit) as excinfo: - result = cli(["custom-events", "get-by-uuid", "-t", "warehousing", "-u", uuid], ReturnType.RAW) + result = cli(["custom-events", "get-by-uuid", "-t", "test-service", "-u", uuid], ReturnType.RAW) out = result.stdout assert "HTTP Error 404: Not Found" in out, "An HTTP 404 error code should be thrown" assert result.exit_code == 1 - cli(["custom-events", "delete-all", "-t", "warehousing"]) + cli(["custom-events", "delete-all", "-t", "test-service"]) diff --git a/tests/test_custom_metrics.py b/tests/test_custom_metrics.py index 7f77bf8..35bd579 100644 --- a/tests/test_custom_metrics.py +++ b/tests/test_custom_metrics.py @@ -3,15 +3,16 @@ # As part of this testing, filed: # CET-19691: custom metrics POST API returns 200 response for un-processed metrics older than 6 months def test(): + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + date = today() - print("date = " + str(date)) - cli(["custom-metrics", "delete", "-t", "shipping-integrations", "-k", "vulnerabilities", "-s", "2022-01-01T00:00:00", "-e", today()]) - cli(["custom-metrics", "add", "-t", "shipping-integrations", "-k", "vulnerabilities", "-v", "3.0"]) - result = cli(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) + cli(["custom-metrics", "delete", "-t", "test-service", "-k", "vulnerabilities", "-s", "2022-01-01T00:00:00", "-e", today()]) + cli(["custom-metrics", "add", "-t", "test-service", "-k", "vulnerabilities", "-v", "3.0"]) + result = cli(["custom-metrics", "get", "-t", "test-service", "-k", "vulnerabilities"]) assert result['data'][0]['value'] == 3.0, "should have single value of 3.0" - cli(["custom-metrics", "add-in-bulk", "-t", "shipping-integrations", "-k", "vulnerabilities", "-v", f"{date}=1.0", "-v", f"{date}=2.0"]) - result = cli(["custom-metrics", "get", "-t", "shipping-integrations", "-k", "vulnerabilities"]) + cli(["custom-metrics", "add-in-bulk", "-t", "test-service", "-k", "vulnerabilities", 
"-v", f"{date}=1.0", "-v", f"{date}=2.0"]) + result = cli(["custom-metrics", "get", "-t", "test-service", "-k", "vulnerabilities"]) assert result['total'] == 3, "should have total of 3 metrics data points" print("There is not a good way to test this today because there is a pre-requisite that the custom metric already exists.") print("If you manually create the custom metric named 'vulnerabilities' you can run these tests.") diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py index d9baf74..9bd3099 100644 --- a/tests/test_dependencies.py +++ b/tests/test_dependencies.py @@ -1,8 +1,11 @@ from tests.helpers.utils import * def test(): - callerTag = "fraud-analyzer" - calleeTag = "backend-worker" + callerTag = "test-service-caller" + calleeTag = "test-service-callee" + + cli(["catalog", "create", "-f", "data/run-time/test-service-caller.yaml"]) + cli(["catalog", "create", "-f", "data/run-time/test-service-callee.yaml"]) cli(["dependencies", "delete-all", "-r", callerTag]) @@ -10,16 +13,16 @@ def test(): cli(["dependencies", "create", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs"]) cli(["dependencies", "update", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies-update.json"]) - result = cli(["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) + result = cli(["dependencies", "get", "-r", "test-service-caller", "-e", "test-service-callee", "-m", "GET", "-p", "/api/v1/github/configurations"]) assert result["callerTag"] == callerTag, "callerTag should be " + callerTag assert result["calleeTag"] == calleeTag, "calleeTag should be " + calleeTag - cli(["dependencies", "get", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/github/configurations"]) + cli(["dependencies", "get", "-r", "test-service-caller", "-e", "test-service-callee", "-m", "GET", "-p", 
"/api/v1/github/configurations"]) - result = cli(["dependencies", "get-all", "-r", "fraud-analyzer", "-o"]) + result = cli(["dependencies", "get-all", "-r", "test-service-caller", "-o"]) assert any(dependency['callerTag'] == callerTag and dependency['path'] == "/api/v1/github/configurations" for dependency in result["dependencies"]) - cli(["dependencies", "delete", "-r", "fraud-analyzer", "-e", "backend-worker", "-m", "GET", "-p", "/api/v1/audit-logs"]) + cli(["dependencies", "delete", "-r", "test-service-caller", "-e", "test-service-callee", "-m", "GET", "-p", "/api/v1/audit-logs"]) cli(["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) cli(["dependencies", "delete-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) - cli(["dependencies", "delete-all", "-r", "fraud-analyzer"]) + cli(["dependencies", "delete-all", "-r", "test-service-caller"]) diff --git a/tests/test_deploys.py b/tests/test_deploys.py index 3d0ddfa..38d89e7 100644 --- a/tests/test_deploys.py +++ b/tests/test_deploys.py @@ -1,48 +1,49 @@ from tests.helpers.utils import * def _add_deploy(): - cli(["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys.json"]) + cli(["deploys", "add", "-t", "test-service", "-f", "data/run-time/deploys.json"]) def _delete_all(): cli(["deploys", "delete-all"]) - result = cli(["deploys", "list", "-t", "shipping-integrations"]) + result = cli(["deploys", "list", "-t", "test-service"]) assert len(result['deployments']) == 0, "All deployments for entity should be deleted" def test_deploys(): _delete_all() - result = cli(["deploys", "add", "-t", "shipping-integrations", "-f", "data/run-time/deploys-uuid.json"]) + result = cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + result = cli(["deploys", "add", "-t", "test-service", "-f", "data/run-time/deploys-uuid.json"]) uuid = result['uuid'] print("uuid = " + uuid) _add_deploy() - result = cli(["deploys", "list", "-t", "shipping-integrations"]) + result = 
cli(["deploys", "list", "-t", "test-service"]) assert any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should find a deploy with uuid" assert result['total'] == 2, "Two deploys should be returned for entity" - cli(["deploys", "update-by-uuid", "-t", "shipping-integrations", "-u", uuid, "-f", "data/run-time/deploys-update.json"]) - result = cli(["deploys", "list", "-t", "shipping-integrations"]) + cli(["deploys", "update-by-uuid", "-t", "test-service", "-u", uuid, "-f", "data/run-time/deploys-update.json"]) + result = cli(["deploys", "list", "-t", "test-service"]) deploy = [deploy for deploy in result['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-456789", "Should find a deploy with sha" - cli(["deploys", "delete-by-uuid", "-t", "shipping-integrations", "-u", uuid]) - result = cli(["deploys", "list", "-t", "shipping-integrations"]) + cli(["deploys", "delete-by-uuid", "-t", "test-service", "-u", uuid]) + result = cli(["deploys", "list", "-t", "test-service"]) assert not any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should not find a deploy with uuid" assert result['total'] == 1, "Following delete-by-uuid, only one deploy should be returned for entity" _add_deploy() - cli(["deploys", "delete", "-t", "shipping-integrations", "-s", "SHA-123456"]) - result = cli(["deploys", "list", "-t", "shipping-integrations"]) + cli(["deploys", "delete", "-t", "test-service", "-s", "SHA-123456"]) + result = cli(["deploys", "list", "-t", "test-service"]) assert not any(deploy['sha'] == "SHA-123456" for deploy in result['deployments']), "Should not find a deploy with sha that was deleted" _add_deploy() cli(["deploys", "delete-by-filter", "-ty", "DEPLOY"]) - result = cli(["deploys", "list", "-t", "shipping-integrations"]) + result = cli(["deploys", "list", "-t", "test-service"]) assert not any(deploy['type'] == "DEPLOY" for deploy in result['deployments']), "Should not find a deploy type 'DEPLOY' that was deleted" result = 
cli(["deploys", "add", - "-t", "shipping-integrations", + "-t", "test-service", "--email", "julien@tpb.com", "--name", "Julien", "--environment", "PYPI.org", @@ -53,7 +54,7 @@ def test_deploys(): "-c", "abc=123", "-c", "def=456"]) uuid = result['uuid'] - result = cli(["deploys", "list", "-t", "shipping-integrations"]) + result = cli(["deploys", "list", "-t", "test-service"]) deploy = [deploy for deploy in result['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-123456", "Should find a deploy with sha" assert deploy[0]['deployer']['email'] == "julien@tpb.com", "Email should be set for deploy" diff --git a/tests/test_entity_types.py b/tests/test_entity_types.py index 7ac9e20..713f3a8 100644 --- a/tests/test_entity_types.py +++ b/tests/test_entity_types.py @@ -1,17 +1,19 @@ from tests.helpers.utils import * def test_resource_definitions(capsys): + cli(["entity-types", "create", "-f", "data/run-time/create-entity-type-empty-schema.json"]) + response = cli(["entity-types", "list"]) entity_types = response['definitions'] - assert any(definition['type'] == 'api' for definition in entity_types), "Should find entity type named 'api'" + assert any(definition['type'] == 'cli-test-empty-schema' for definition in entity_types), "Should find entity type named 'cli-test-empty-schema'" + + if any(definition['type'] == 'cli-test-empty-schema' for definition in entity_types): + cli(["entity-types", "delete", "-t", "cli-test-empty-schema"]) + cli(["entity-types", "create", "-f", "data/run-time/create-entity-type-empty-schema.json"]) + + response = cli(["entity-types", "list"]) + assert any(definition['type'] == 'cli-test-empty-schema' for definition in response['definitions']), "Should find entity type named 'cli-test-empty-schema'" + + cli(["entity-types", "get", "-t", "cli-test-empty-schema"]) - if any(definition['type'] == 'test-entity-type' for definition in entity_types): - cli(["entity-types", "delete", "-ty", "test-entity-type"]) -# cli(["entity-types", 
"create", "-f", "tests/test-resource-definition.json"]) -# -# cli(["entity-types", "list"]) -# assert any(definition['type'] == 'test-entity-type' for definition in response['definitions']), "Should find entity type named 'test-entity-type'" -# -# cli(["entity-types", "get", "-t", "test-resource-definition"]) -# -# cli(["entity-types", "update", "-t", "test-resource-definition", "-f", "tests/test-resource-definition-update.json"]) + cli(["entity-types", "update", "-t", "cli-test-empty-schema", "-f", "data/run-time/update-entity-type-empty-schema.json"]) diff --git a/tests/test_groups_input_file.py b/tests/test_groups_input_file.py new file mode 100644 index 0000000..306e376 --- /dev/null +++ b/tests/test_groups_input_file.py @@ -0,0 +1,16 @@ +from tests.helpers.utils import * + +def test(): + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + cli(["groups", "add", "-t", "test-service", "-f", "tests/test-groups.json"]) + + cli(["groups", "add", "-t", "test-service", "-f", "tests/test-groups.json"]) + response = cli(["groups", "get", "-t", "test-service"]) + assert any(group['tag'] == 'group1' for group in response['groups']), "should find group1 in list of groups" + assert any(group['tag'] == 'group2' for group in response['groups']), "should find group2 in list of groups" + + cli(["groups", "delete", "-t", "test-service", "-f", "tests/test-groups.json"]) + response = cli(["groups", "get", "-t", "test-service"]) + + assert not(any(group['tag'] == 'group1' for group in response['groups'])), "should not find group1 in list of groups" + assert not(any(group['tag'] == 'group2' for group in response['groups'])), "should not find group2 in list of groups" diff --git a/tests/test_integrations_aws.py b/tests/test_integrations_aws.py index cddacf3..86cd340 100644 --- a/tests/test_integrations_aws.py +++ b/tests/test_integrations_aws.py @@ -32,9 +32,9 @@ def test_integrations_aws_get(): cli(["integrations", "aws", "get", "-a", "123456"]) @responses.activate 
-def test_integrations_aws_get_all(): +def test_integrations_aws_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/aws/configurations", status=200) - cli(["integrations", "aws", "get-all"]) + cli(["integrations", "aws", "list"]) @responses.activate def test_integrations_aws_update(): diff --git a/tests/test_integrations_azure_devops.py b/tests/test_integrations_azure_devops.py index 8f53728..484b49c 100644 --- a/tests/test_integrations_azure_devops.py +++ b/tests/test_integrations_azure_devops.py @@ -34,9 +34,9 @@ def test_integrations_azure_devops_get(): cli(["integrations", "azure-devops", "get", "-a", "test"]) @responses.activate -def test_integrations_azure_devops_get_all(): +def test_integrations_azure_devops_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-devops/configurations", json={}, status=200) - cli(["integrations", "azure-devops", "get-all"]) + cli(["integrations", "azure-devops", "list"]) @responses.activate def test_integrations_azure_devops_get_default(): diff --git a/tests/test_integrations_azure_resources.py b/tests/test_integrations_azure_resources.py index 9892db3..49f2576 100644 --- a/tests/test_integrations_azure_resources.py +++ b/tests/test_integrations_azure_resources.py @@ -34,9 +34,9 @@ def test_integrations_azure_resources_get(): cli(["integrations", "azure-resources", "get", "-a", "test"]) @responses.activate -def test_integrations_azure_resources_get_all(): +def test_integrations_azure_resources_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/azure-resources/configurations", json={}, status=200) - cli(["integrations", "azure-resources", "get-all"]) + cli(["integrations", "azure-resources", "list"]) @responses.activate def test_integrations_azure_resources_get_default(): diff --git a/tests/test_integrations_circleci.py b/tests/test_integrations_circleci.py index 903ddbd..e738404 100644 --- a/tests/test_integrations_circleci.py +++ 
b/tests/test_integrations_circleci.py @@ -34,9 +34,9 @@ def test_integrations_circle_ci_get(): cli(["integrations", "circleci", "get", "-a", "test"]) @responses.activate -def test_integrations_circle_ci_get_all(): +def test_integrations_circle_ci_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/circleci/configurations", json={}, status=200) - cli(["integrations", "circleci", "get-all"]) + cli(["integrations", "circleci", "list"]) @responses.activate def test_integrations_circle_ci_get_default(): diff --git a/tests/test_integrations_coralogix.py b/tests/test_integrations_coralogix.py index d84c9cd..52bcd53 100644 --- a/tests/test_integrations_coralogix.py +++ b/tests/test_integrations_coralogix.py @@ -34,9 +34,9 @@ def test_integrations_coralogix_get(): cli(["integrations", "coralogix", "get", "-a", "test"]) @responses.activate -def test_integrations_coralogix_get_all(): +def test_integrations_coralogix_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/coralogix/configurations", json={}, status=200) - cli(["integrations", "coralogix", "get-all"]) + cli(["integrations", "coralogix", "list"]) @responses.activate def test_integrations_coralogix_get_default(): diff --git a/tests/test_integrations_datadog.py b/tests/test_integrations_datadog.py index 706c05f..bae6289 100644 --- a/tests/test_integrations_datadog.py +++ b/tests/test_integrations_datadog.py @@ -34,9 +34,9 @@ def test_integrations_datadog_get(): cli(["integrations", "datadog", "get", "-a", "test"]) @responses.activate -def test_integrations_datadog_get_all(): +def test_integrations_datadog_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/datadog/configurations", json={}, status=200) - cli(["integrations", "datadog", "get-all"]) + cli(["integrations", "datadog", "list"]) @responses.activate def test_integrations_datadog_get_default(): diff --git a/tests/test_integrations_github.py b/tests/test_integrations_github.py index 
951fe06..1490b9e 100644 --- a/tests/test_integrations_github.py +++ b/tests/test_integrations_github.py @@ -34,9 +34,9 @@ def test_integrations_github_get(): cli(["integrations", "github", "get", "-a", "test"]) @responses.activate -def test_integrations_github_get_all(): +def test_integrations_github_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/github/configurations", json={}, status=200) - cli(["integrations", "github", "get-all"]) + cli(["integrations", "github", "list"]) @responses.activate def test_integrations_github_get_default(): diff --git a/tests/test_integrations_gitlab.py b/tests/test_integrations_gitlab.py index a46f97e..8ff6e01 100644 --- a/tests/test_integrations_gitlab.py +++ b/tests/test_integrations_gitlab.py @@ -34,9 +34,9 @@ def test_integrations_gitlab_get(): cli(["integrations", "gitlab", "get", "-a", "test"]) @responses.activate -def test_integrations_gitlab_get_all(): +def test_integrations_gitlab_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/gitlab/configurations", json={}, status=200) - cli(["integrations", "gitlab", "get-all"]) + cli(["integrations", "gitlab", "list"]) @responses.activate def test_integrations_gitlab_get_default(): diff --git a/tests/test_integrations_incidentio.py b/tests/test_integrations_incidentio.py index 6100dcc..325c26d 100644 --- a/tests/test_integrations_incidentio.py +++ b/tests/test_integrations_incidentio.py @@ -34,9 +34,9 @@ def test_integrations_incidentio_get(): cli(["integrations", "incidentio", "get", "-a", "test"]) @responses.activate -def test_integrations_incidentio_get_all(): +def test_integrations_incidentio_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/incidentio/configurations", json={}, status=200) - cli(["integrations", "incidentio", "get-all"]) + cli(["integrations", "incidentio", "list"]) @responses.activate def test_integrations_incidentio_get_default(): diff --git 
a/tests/test_integrations_launchdarkly.py b/tests/test_integrations_launchdarkly.py index b5aa665..5657d87 100644 --- a/tests/test_integrations_launchdarkly.py +++ b/tests/test_integrations_launchdarkly.py @@ -34,9 +34,9 @@ def test_integrations_launchdarkly_get(): cli(["integrations", "launchdarkly", "get", "-a", "test"]) @responses.activate -def test_integrations_launchdarkly_get_all(): +def test_integrations_launchdarkly_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/launchdarkly/configurations", json={}, status=200) - cli(["integrations", "launchdarkly", "get-all"]) + cli(["integrations", "launchdarkly", "list"]) @responses.activate def test_integrations_launchdarkly_get_default(): diff --git a/tests/test_integrations_newrelic.py b/tests/test_integrations_newrelic.py index 71846b4..b54e205 100644 --- a/tests/test_integrations_newrelic.py +++ b/tests/test_integrations_newrelic.py @@ -34,9 +34,9 @@ def test_integrations_newrelic_get(): cli(["integrations", "newrelic", "get", "-a", "test"]) @responses.activate -def test_integrations_newrelic_get_all(): +def test_integrations_newrelic_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/newrelic/configurations", json={}, status=200) - cli(["integrations", "newrelic", "get-all"]) + cli(["integrations", "newrelic", "list"]) @responses.activate def test_integrations_newrelic_get_default(): diff --git a/tests/test_integrations_pagerduty.py b/tests/test_integrations_pagerduty.py index 96752e4..7c5271d 100644 --- a/tests/test_integrations_pagerduty.py +++ b/tests/test_integrations_pagerduty.py @@ -34,9 +34,9 @@ def test_integrations_circle_ci_get(): cli(["integrations", "pagerduty", "get", "-a", "test"]) @responses.activate -def test_integrations_circle_ci_get_all(): +def test_integrations_circle_ci_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/pagerduty/configurations", json={}, status=200) - cli(["integrations", "pagerduty", 
"get-all"]) + cli(["integrations", "pagerduty", "list"]) @responses.activate def test_integrations_circle_ci_get_default(): diff --git a/tests/test_integrations_prometheus.py b/tests/test_integrations_prometheus.py index 3bb4c66..83ff80c 100644 --- a/tests/test_integrations_prometheus.py +++ b/tests/test_integrations_prometheus.py @@ -34,9 +34,9 @@ def test_integrations_prometheus_get(): cli(["integrations", "prometheus", "get", "-a", "test"]) @responses.activate -def test_integrations_prometheus_get_all(): +def test_integrations_prometheus_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/prometheus/configurations", json={}, status=200) - cli(["integrations", "prometheus", "get-all"]) + cli(["integrations", "prometheus", "list"]) @responses.activate def test_integrations_prometheus_get_default(): diff --git a/tests/test_integrations_sonarqube.py b/tests/test_integrations_sonarqube.py index 0f30e71..9195582 100644 --- a/tests/test_integrations_sonarqube.py +++ b/tests/test_integrations_sonarqube.py @@ -55,9 +55,9 @@ def test_integrations_sonarqube_get(): cli(["integrations", "sonarqube", "get", "-a", "test"]) @responses.activate -def test_integrations_sonarqube_get_all(): +def test_integrations_sonarqube_list(): responses.add(responses.GET, os.getenv("CORTEX_BASE_URL") + "/api/v1/sonarqube/configurations", json={}, status=200) - cli(["integrations", "sonarqube", "get-all"]) + cli(["integrations", "sonarqube", "list"]) @responses.activate def test_integrations_sonarqube_get_default(): From 961f830f5b12258f0db45eb1e9643b417dd54409 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Thu, 5 Jun 2025 19:14:04 -0700 Subject: [PATCH 49/56] A little more cleanup - remove references to public-api-test --- data/run-time/docs-entity.yaml | 2 +- data/run-time/groups-entity.yaml | 2 +- data/run-time/scorecard.yaml | 6 +++--- data/run-time/scorecard_drafts.yaml | 6 +++--- data/run-time/test_plugins.json | 4 ++-- 
data/run-time/test_plugins_invalid_role.json | 4 ++-- data/run-time/test_plugins_manager.json | 4 ++-- tests/test_catalog_list_include_owners.py | 4 +++- tests/test_catalog_list_page.py | 4 +++- tests/test_catalog_list_page_size.py | 4 +++- 10 files changed, 23 insertions(+), 17 deletions(-) diff --git a/data/run-time/docs-entity.yaml b/data/run-time/docs-entity.yaml index 3201f18..5b9805f 100644 --- a/data/run-time/docs-entity.yaml +++ b/data/run-time/docs-entity.yaml @@ -5,4 +5,4 @@ info: x-cortex-tag: docs-entity x-cortex-type: service x-cortex-groups: - - public-api-test + - cli-test diff --git a/data/run-time/groups-entity.yaml b/data/run-time/groups-entity.yaml index 42e4743..6cddbcd 100644 --- a/data/run-time/groups-entity.yaml +++ b/data/run-time/groups-entity.yaml @@ -5,4 +5,4 @@ info: x-cortex-tag: groups-entity x-cortex-type: service x-cortex-groups: - - public-api-test + - cli-test diff --git a/data/run-time/scorecard.yaml b/data/run-time/scorecard.yaml index 910e1ef..7345cf7 100644 --- a/data/run-time/scorecard.yaml +++ b/data/run-time/scorecard.yaml @@ -1,5 +1,5 @@ -tag: public-api-test-scorecard -name: Public API Test Scorecard +tag: cli-test-scorecard +name: CLI Test Scorecard description: Used to test Cortex public API draft: false ladder: @@ -17,5 +17,5 @@ rules: filter: category: SERVICE filter: - query: 'entity.tag() == "user-profile-metadata-service"' + query: 'entity.tag() == "test-service"' category: SERVICE diff --git a/data/run-time/scorecard_drafts.yaml b/data/run-time/scorecard_drafts.yaml index 322c13f..5a8d558 100644 --- a/data/run-time/scorecard_drafts.yaml +++ b/data/run-time/scorecard_drafts.yaml @@ -1,5 +1,5 @@ -tag: public-api-test-draft-scorecard -name: Public API Test Draft Scorecard +tag: cli-test-draft-scorecard +name: CLI Test Draft Scorecard description: Used to test Scorecard drafts with Cortex public API draft: true ladder: @@ -17,5 +17,5 @@ rules: filter: category: SERVICE filter: - query: 'entity.tag() == 
"user-profile-metadata-service"' + query: 'entity.tag() == "test-service"' category: SERVICE diff --git a/data/run-time/test_plugins.json b/data/run-time/test_plugins.json index c3195d1..aa30aae 100644 --- a/data/run-time/test_plugins.json +++ b/data/run-time/test_plugins.json @@ -14,7 +14,7 @@ "description": "Just testin' plugin uploads", "isDraft": false, "minimumRoleRequired": "VIEWER", - "name": "Public API Test Plugin", - "tag": "public-api-test-plugin" + "name": "CLI Test Plugin", + "tag": "cli-test-plugin" } diff --git a/data/run-time/test_plugins_invalid_role.json b/data/run-time/test_plugins_invalid_role.json index 45ed9a6..0fd6c0a 100644 --- a/data/run-time/test_plugins_invalid_role.json +++ b/data/run-time/test_plugins_invalid_role.json @@ -14,7 +14,7 @@ "description": "Just testing plugin permissions", "isDraft": true, "minimumRoleRequired": "ADMIN", - "name": "Public API Test Plugin Admin", - "tag": "public-api-test-plugin-admin" + "name": "CLI Test Plugin Admin", + "tag": "cli-test-plugin-admin" } diff --git a/data/run-time/test_plugins_manager.json b/data/run-time/test_plugins_manager.json index 68cd95b..35a4bbf 100644 --- a/data/run-time/test_plugins_manager.json +++ b/data/run-time/test_plugins_manager.json @@ -14,7 +14,7 @@ "description": "Just testing plugin permissions", "isDraft": true, "minimumRoleRequired": "MANAGER", - "name": "Public API Test Plugin Manager", - "tag": "public-api-test-plugin-manager" + "name": "CLI Test Plugin Manager", + "tag": "cli-test-plugin-manager" } diff --git a/tests/test_catalog_list_include_owners.py b/tests/test_catalog_list_include_owners.py index ccddb25..1050a58 100644 --- a/tests/test_catalog_list_include_owners.py +++ b/tests/test_catalog_list_include_owners.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(capsys): - response = cli(["catalog", "list", "-g", "public-api-test", "-io"]) + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "list", 
"-g", "cli-test", "-io"]) assert not(response['entities'][0]['owners']['teams'] is None), "Teams array should be returned in result" diff --git a/tests/test_catalog_list_page.py b/tests/test_catalog_list_page.py index 61bcc69..5dbb0bd 100644 --- a/tests/test_catalog_list_page.py +++ b/tests/test_catalog_list_page.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(capsys): - response = cli(["catalog", "list", "-g", "public-api-test", "-p", "0"]) + cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "list", "-g", "cli-test", "-p", "0"]) assert (len(response['entities']) > 0) diff --git a/tests/test_catalog_list_page_size.py b/tests/test_catalog_list_page_size.py index afab118..664a387 100644 --- a/tests/test_catalog_list_page_size.py +++ b/tests/test_catalog_list_page_size.py @@ -1,5 +1,7 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "list", "-g", "public-api-test", "-p", "0", "-z", "1"]) + response = cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + + response = cli(["catalog", "list", "-g", "cli-test", "-p", "0", "-z", "1"]) assert (len(response['entities']) == 1) From a345b7e60f058a5724b5eb051161ff2bd05b766b Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 11 Jun 2025 15:08:06 -0700 Subject: [PATCH 50/56] Test clean up --- .github/workflows/test-pr.yml | 12 +- .gitignore | 3 + HISTORY.md | 9 +- Justfile | 52 +- Makefile | 268 -- cortexapps_cli/.cortex.py.swp | Bin 16384 -> 0 bytes cortexapps_cli/cli.py | 7 +- cortexapps_cli/command_options.py | 4 + cortexapps_cli/commands/api_keys.py | 1 + cortexapps_cli/commands/audit_logs.py | 1 + cortexapps_cli/commands/backup.py | 54 +- cortexapps_cli/commands/catalog.py | 6 +- cortexapps_cli/commands/custom_data.py | 1 + cortexapps_cli/commands/custom_events.py | 1 + cortexapps_cli/commands/entity_types.py | 3 +- cortexapps_cli/commands/gitops_logs.py | 1 + cortexapps_cli/commands/initiatives.py | 71 +- 
cortexapps_cli/commands/ip_allowlist.py | 28 +- cortexapps_cli/commands/packages.py | 1 + cortexapps_cli/commands/plugins.py | 44 +- cortexapps_cli/commands/scorecards.py | 8 +- cortexapps_cli/commands/workflows.py | 30 +- cortexapps_cli/cortex.py | 4011 ----------------- cortexapps_cli/cortex_client.py | 1 + cortexapps_cli/utils.py | 11 +- .../catalog/cli-test-archive-entity.yaml} | 2 +- .../catalog/cli-test-create-entity.yaml} | 2 +- .../catalog/cli-test-delete-entity.yaml} | 2 +- .../catalog/cli-test-docs-entity.yaml} | 2 +- .../catalog/cli-test-domain-child.yaml} | 4 +- .../catalog/cli-test-domain-parent.yaml} | 4 +- .../catalog/cli-test-groups-entity.yaml} | 2 +- .../catalog/cli-test-service-callee.yaml} | 4 +- .../catalog/cli-test-service-caller.yaml} | 2 +- .../catalog/cli-test-service-group-1.yaml} | 2 +- .../catalog/cli-test-service-group-2.yaml} | 2 +- .../catalog/cli-test-service-links.yaml} | 2 +- .../catalog/cli-test-service-metadata.yaml} | 2 +- .../cli-test-service-test-team-1.yaml} | 4 +- .../cli-test-service-test-team-2.yaml} | 4 +- .../catalog/cli-test-service.yaml} | 2 +- .../catalog/cli-test-team-1.yaml} | 2 +- .../catalog/cli-test-team-2.yaml} | 2 +- .../catalog/cli-test-team-child.yaml} | 2 +- .../catalog/cli-test-team-parent.yaml} | 4 +- .../catalog/cli-test-unarchive-entity.yaml} | 2 +- .../entity-types/cli-test.json} | 2 +- data/import/ip-allowlist/ip-allowlist.json | 8 + .../import/plugins/cli-test-plugin.json | 4 +- .../scorecards/cli-test-draft-scorecard.yaml | 4 +- .../import/scorecards/cli-test-scorecard.yaml | 6 +- data/import/workflows/cli-test-workflow.yaml | 20 + data/run-time/custom-data-bulk.json | 4 +- data/run-time/dependencies-bulk.json | 4 +- ...ty-schema.json => entity-type-update.json} | 0 data/run-time/scorecard.yaml | 21 - data/run-time/scorecard_drafts.yaml | 21 - tests/helpers/utils.py | 2 - tests/test_catalog_archive_entity.py | 1 - tests/test_catalog_create_entity.py | 6 +- tests/test_catalog_create_entity_viewer.py | 
2 +- tests/test_catalog_delete_entity.py | 9 +- tests/test_catalog_get_entity_details.py | 6 +- ...st_catalog_get_entity_details_hierarchy.py | 5 +- tests/test_catalog_include_links.py | 2 - tests/test_catalog_include_metadata.py | 2 - tests/test_catalog_include_nested_fields.py | 9 +- tests/test_catalog_list_by_group_multiple.py | 3 - tests/test_catalog_list_by_group_single.py | 2 - tests/test_catalog_list_by_owners_multiple.py | 7 +- tests/test_catalog_list_by_owners_single.py | 5 +- tests/test_catalog_list_by_types.py | 2 - tests/test_catalog_list_entity_descriptors.py | 4 +- ...st_catalog_list_entity_descriptors_page.py | 1 - ...talog_list_entity_descriptors_page_size.py | 2 - ...st_catalog_list_entity_descriptors_yaml.py | 4 +- tests/test_catalog_list_include_archived.py | 7 +- tests/test_catalog_list_include_owners.py | 2 - tests/test_catalog_list_page.py | 2 - tests/test_catalog_list_page_size.py | 2 - ...test_catalog_retrieve_entity_descriptor.py | 6 +- ...catalog_retrieve_entity_descriptor_yaml.py | 6 +- tests/test_catalog_retrieve_entity_details.py | 6 +- ...etrieve_entity_details_hierarchy_fields.py | 6 +- tests/test_catalog_unarchive_entity.py | 7 +- tests/test_config_file.py | 73 +- ...st_custom_data_create_or_update_in_bulk.py | 4 +- tests/test_custom_data_delete.py | 10 +- tests/test_custom_data_list.py | 4 +- tests/test_custom_events_list.py | 12 +- tests/test_custom_events_uuid.py | 16 +- tests/test_custom_metrics.py | 27 +- tests/test_dependencies.py | 17 +- tests/test_deploys.py | 27 +- tests/test_entity_types.py | 16 +- tests/test_export.py | 4 + tests/test_gitops_logs.py | 10 +- tests/test_groups.py | 12 +- tests/test_groups_input_file.py | 11 +- tests/test_import.py | 6 + tests/test_ip_allowlist.py | 2 +- tests/test_packages.py | 46 +- tests/test_plugins.py | 18 +- tests/test_scim.py | 40 +- tests/test_scorecards.py | 45 +- tests/test_stdin.py | 4 +- tests/test_workflows.py | 8 +- 107 files changed, 528 insertions(+), 4778 deletions(-) delete 
mode 100644 Makefile delete mode 100644 cortexapps_cli/.cortex.py.swp delete mode 100755 cortexapps_cli/cortex.py rename data/{run-time/archive-entity.yaml => import/catalog/cli-test-archive-entity.yaml} (82%) rename data/{run-time/create-entity.yaml => import/catalog/cli-test-create-entity.yaml} (81%) rename data/{run-time/delete-entity.yaml => import/catalog/cli-test-delete-entity.yaml} (83%) rename data/{run-time/docs-entity.yaml => import/catalog/cli-test-docs-entity.yaml} (82%) rename data/{run-time/test-domain-child.yaml => import/catalog/cli-test-domain-child.yaml} (66%) rename data/{run-time/test-domain-parent.yaml => import/catalog/cli-test-domain-parent.yaml} (64%) rename data/{run-time/groups-entity.yaml => import/catalog/cli-test-groups-entity.yaml} (81%) rename data/{run-time/test-service-callee.yaml => import/catalog/cli-test-service-callee.yaml} (99%) rename data/{run-time/test-service-caller.yaml => import/catalog/cli-test-service-caller.yaml} (73%) rename data/{run-time/test-service-group-1.yaml => import/catalog/cli-test-service-group-1.yaml} (75%) rename data/{run-time/test-service-group-2.yaml => import/catalog/cli-test-service-group-2.yaml} (75%) rename data/{run-time/test-service-links.yaml => import/catalog/cli-test-service-links.yaml} (84%) rename data/{run-time/test-service-metadata.yaml => import/catalog/cli-test-service-metadata.yaml} (80%) rename data/{run-time/test-service-test-team-1.yaml => import/catalog/cli-test-service-test-team-1.yaml} (71%) rename data/{run-time/test-service-test-team-2.yaml => import/catalog/cli-test-service-test-team-2.yaml} (70%) rename data/{run-time/test-service.yaml => import/catalog/cli-test-service.yaml} (86%) rename data/{run-time/test-team-1.yaml => import/catalog/cli-test-team-1.yaml} (88%) rename data/{run-time/test-team-2.yaml => import/catalog/cli-test-team-2.yaml} (87%) rename data/{run-time/test-team-child.yaml => import/catalog/cli-test-team-child.yaml} (87%) rename 
data/{run-time/test-team-parent.yaml => import/catalog/cli-test-team-parent.yaml} (80%) rename data/{run-time/unarchive-entity.yaml => import/catalog/cli-test-unarchive-entity.yaml} (80%) rename data/{run-time/create-entity-type-empty-schema.json => import/entity-types/cli-test.json} (77%) create mode 100644 data/import/ip-allowlist/ip-allowlist.json rename tests/test_plugins.json => data/import/plugins/cli-test-plugin.json (85%) rename tests/test_scorecards_draft.yaml => data/import/scorecards/cli-test-draft-scorecard.yaml (87%) rename tests/test_scorecards.yaml => data/import/scorecards/cli-test-scorecard.yaml (83%) create mode 100644 data/import/workflows/cli-test-workflow.yaml rename data/run-time/{update-entity-type-empty-schema.json => entity-type-update.json} (100%) delete mode 100644 data/run-time/scorecard.yaml delete mode 100644 data/run-time/scorecard_drafts.yaml create mode 100644 tests/test_export.py create mode 100644 tests/test_import.py diff --git a/.github/workflows/test-pr.yml b/.github/workflows/test-pr.yml index 68511dc..6287db7 100644 --- a/.github/workflows/test-pr.yml +++ b/.github/workflows/test-pr.yml @@ -12,12 +12,8 @@ on: env: AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }} CORTEX_API_KEY: ${{ secrets.CORTEX_API_KEY }} - CORTEX_API_KEY_RICH_SANDBOX: ${{ secrets.CORTEX_API_KEY_RICH_SANDBOX }} CORTEX_API_KEY_VIEWER: ${{ secrets.CORTEX_API_KEY_VIEWER }} CORTEX_BASE_URL: ${{ vars.CORTEX_BASE_URL }} - GH_PAT: ${{ secrets.GH_PAT }} - GH_WEBHOOK_SECRET: ${{ secrets.GH_WEBHOOK_SECRET }} - CORTEX_GH_WEBHOOK_URL: ${{ vars.CORTEX_GH_WEBHOOK_URL }} jobs: test: @@ -41,6 +37,7 @@ jobs: - name: Install dependencies run: | + apt update && apt install just python -m pip install --upgrade pip pip install poetry poetry-audit-plugin pytest-cov pytest pytest-xdist @@ -53,11 +50,6 @@ jobs: echo "[default]" > $HOME/.cortex/config echo "api_key = $CORTEX_API_KEY" >> $HOME/.cortex/config echo "base_url = $CORTEX_BASE_URL" >> $HOME/.cortex/config - echo 
"[rich-sandbox]" >> $HOME/.cortex/config - echo "api_key = $CORTEX_API_KEY_RICH_SANDBOX" >> $HOME/.cortex/config - pwd - ls -l $HOME/.cortex - cat $HOME/.cortex/config shell: bash - name: Install package @@ -67,4 +59,4 @@ jobs: - name: Test with pytest run: | - make all-cli + just test-all diff --git a/.gitignore b/.gitignore index da0392a..2d5197c 100644 --- a/.gitignore +++ b/.gitignore @@ -10,4 +10,7 @@ coverage.json .github/workflows/test.yml .export report.html +import.html +report*.html .load-data-done + diff --git a/HISTORY.md b/HISTORY.md index 45b9153..ed8465e 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -15,17 +15,20 @@ Release History delete -ty -> delete -t **TODO** -- Do a full reconciliation of all flags +- DONE: Do a full reconciliation of all flags - DONE: Add -debug flag - DONE: Test input files, env vars - DONE: Add support for adding groups via JSON file? - DONE: Check all get/list sub-commands could change get-all to list, but prefer to make no change - backup export -> don't include cloud entities -- backup import -> need to complete -- deleting existing entity types -> would be good to loop over entity types with a certain filter +- DONE: backup import -> need to complete +- DONE: deleting existing entity types -> would be good to loop over entity types with a certain filter base initially on name prefix + --> UPDATE: fix was to incorporate force when creating, force will delete existing entity type - warning about using env vars +- DONE: default cortex_base_url = https://api.getcortexapp.com +- DONE: csv export -> option to not show header line, get rid of EOL diff --git a/Justfile b/Justfile index 9269661..5c78318 100644 --- a/Justfile +++ b/Justfile @@ -1,4 +1,5 @@ cortex_cli := 'poetry run cortex' +pytest := 'PYTHONPATH=. 
poetry run pytest -rA' export CORTEX_API_KEY := env('CORTEX_API_KEY') export CORTEX_BASE_URL := env('CORTEX_BASE_URL', "https://api.getcortexapp.com") @@ -7,49 +8,28 @@ export CORTEX_API_KEY_VIEWER := env('CORTEX_API_KEY_VIEWER') help: @just -l +_setup: + @if [ -f .coverage ]; then rm .coverage; fi + # Run all tests -test-all: test-parallel test-serial +test-all: _setup test-parallel test-serial # Run tests that can run in parallel -test-parallel: - PYTHONPATH=. poetry run pytest -rA -n auto -m "not serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests +test-parallel: test-import + {{pytest}} -n auto -m "not setup and not serial" --html=report-parallel.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests # Run all tests serially - helpful to see if any tests seem to be hanging -_test-all-serial: - PYTHONPATH=. poetry run pytest -rA -m "not serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests +_test-all-individual: test-import + {{pytest}} --html=report-all-invidual.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests # Run tests that have to run sequentially -test-serial: - @if [ -f .coverage ]; then rm .coverage; fi - PYTHONPATH=. poetry run pytest -rA -n auto -m "serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests +test-serial: test-import + {{pytest}} -n auto -m "serial" --html=report-serial.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing tests + +# Run import test, a pre-requisite for any tests that rely on test data. 
+test-import: + {{pytest}} tests/test_import.py --cov=cortexapps_cli --cov-report= # Run a single test, ie: just test tests/test_catalog.py test testname: - poetry run pytest {{testname}} - -_load-data: - #!/bin/bash - if [[ -f .load-data-done ]] - then - echo "Not loading test data since .load-data-done file exists." - exit - fi - - # Delete existing entity definitions and any entities to prevent getting a conflict error. - # TODO: modify cli import to add a flag to manage this - for entity_type_file in `ls -1 data/entity-types/*`; do - entity_type=$(basename ${entity_type_file} .json) - echo "Deleting entity type: ${entity_type}" - # Delete all instances of this type - {{cortex_cli}} catalog delete-by-type -t ${entity_type} - # Now delete the type if it exists - ({{cortex_cli}} entity-types get -t ${entity_type} && {{cortex_cli}} entity-types delete -t ${entity_type}) || : - done - - {{cortex_cli}} backup import -d data - - # Archive a couple of entities in order to test commands that include or exclude archived entities - {{cortex_cli}} catalog archive -t robot-item-sorter - {{cortex_cli}} catalog archive -t inventory-scraper - - touch .load-data-done + {{pytest}} {{testname}} diff --git a/Makefile b/Makefile deleted file mode 100644 index 63aee46..0000000 --- a/Makefile +++ /dev/null @@ -1,268 +0,0 @@ -# -# Environment Variables -# -UNAME_S := $(shell uname -s) - -PYTHON_VENV = ~/.venv/cortex-cli-test - -ifeq ($(CORTEX_CLI),) ## Cortex CLI, defaults to CLI in the repository -export CORTEX_CLI := . 
$(PYTHON_VENV)/bin/activate; python3 ./cortexapps_cli/cortex.py -q -endif - -ifeq ($(CORTEX_GH_ALIAS),) ## Github alias defined in Cortex GitHub integration, defaults to public-api-test -export CORTEX_GH_ALIAS := public-api-test -endif - -# Change this once we can get WEBHOOK_URL via Cortex API -ifeq ($(CORTEX_GH_WEBHOOK_URL),) ## The GitHub webhook URL defined in Cortex -export CORTEX_GH_WEBHOOK_URL=https://api.getcortexapp.com/api/v1/github/manual-webhook/e0b77380-e7af-4e14-8563-8168651e307e/$(CORTEX_GH_ALIAS) -endif - -# Should only need to change this if using enterprise GitHub. -ifeq ($(GH_URL),) ## GitHub URL, will be used to call the GitHub API to create a webhook -export GH_URL=https://api.github.com -endif - -ifeq ($(GH_ORG),) ## GitHub organization used for GitHub tests -export GH_ORG=cortextests -endif - -ifeq ($(GH_REPO),) ## GitHub repository used for GitHub tests -export GH_REPO=public-api-test-repo -endif - -ifeq ($(CORTEX_API_KEY),) ## Required; Cortex API key with Admin permission - $(error CORTEX_API_KEY is not set) -endif - -ifeq ($(CORTEX_BASE_URL),) ## Required; Cortex base URL for API, ie for cloud this would be https://api.getcortexapp.com - $(error CORTEX_BASE_URL is not set) -endif -ifeq ($(CORTEX_BASE_URL),http://api.local.getcortexapp.com:8080) -export CORTEX_GH_WEBHOOK_URL=$(shell ./scripts/ngrok.sh)/api/v1/github/manual-webhook/a4037bca-c83e-4058-8550-8393826ff642/$(CORTEX_GH_ALIAS) - ifeq ($(NGROK_PORT),) - export NGROK_PORT=8081 - endif -endif - -ifeq ($(CORTEX_ENV),) ## Cortex environment, defaults to 'default'; used to distinguish make build targets between environments; if not set inferred from CORTEX_BASE_URL - ifeq ($(CORTEX_BASE_URL),http://api.local.getcortexapp.com:8080) - export CORTEX_ENV=local - else ifeq ($(CORTEX_BASE_URL),https://api.staging.getcortexapp.com) - export CORTEX_ENV=staging - else ifeq ($(CORTEX_BASE_URL),https://api.getcortexapp.com) - export CORTEX_ENV=prod - else ifeq 
($(CORTEX_BASE_URL),http://api.helm.getcortexapp.com) - export CORTEX_ENV=helm - else ifeq ($(CORTEX_ENV),) - export CORTEX_ENV=default - endif -endif - -ifneq ($(CORTEX_TENANT),) ## Used with CORTEX_ENV, if set can help distinguish between different tenants in the same environment - export BUILD_SUBDIR=$(CORTEX_ENV)-$(CORTEX_TENANT) -else - export BUILD_SUBDIR=$(CORTEX_ENV) -endif - -# -# Configuration variables -# -BUILD_DIR = build/$(BUILD_SUBDIR) -BUILD_TOOLS_DIR = $(BUILD_DIR)/tools -export FEATURE_FLAG_EXPORT=$(BUILD_DIR)/ff/feature-flags.json -DATA_DIR = data -ENTITIES := $(shell find $(DATA_DIR) -type f) - -ARCHIVE_ENTITIES = robot-item-sorter inventory-scraper -ARCHIVE_TARGETS := $(ARCHIVE_ENTITIES:%=$(BUILD_DIR)/%.archive) - -CATALOG_ENTITIES := $(wildcard data/catalog/*.yaml) -CATALOG_TARGETS := $(CATALOG_ENTITIES:data/catalog/%.yaml=$(BUILD_DIR)/%.yaml) - -CUSTOM_RESOURCES := $(wildcard data/resource-definitions/*.json) -CUSTOM_RESOURCE_TARGETS := $(CUSTOM_RESOURCES:data/resource-definitions/%.json=$(BUILD_DIR)/%.json) - -FEATURE_FLAG_VARS := $(shell env | grep CORTEX_FF | cut -d= -f1) -FEATURE_FLAGS = $(patsubst CORTEX_FF_%,%,$(FEATURE_FLAG_VARS)) -FEATURE_FLAG_ENVSUBST := $(FEATURE_FLAGS:%=$(BUILD_DIR)/ff/envsubst/%) - -all: info setup feature-flags-dump load-data github test-api ## Setup environment, load data and test -all-cli: all test-cli - -.PHONY: info -info: - @echo "Running test for: $(BUILD_SUBDIR)" - -.PHONY: setup -setup: tools venv ## Setup python virtual environment for testing - -# -# -# Tools setup -# -# -.PHONY: tools -tools: brew jq python3 - -.PHONY: brew -brew: $(BUILD_TOOLS_DIR)/brew | $(BUILD_TOOLS_DIR) - -$(BUILD_TOOLS_DIR)/brew: | $(BUILD_TOOLS_DIR) -ifeq ($(UNAME_S),Darwin) - @which brew > /dev/null || /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" -endif - @touch $@ - -.PHONY: jq -jq: $(BUILD_TOOLS_DIR)/jq | $(BUILD_TOOLS_DIR) - -$(BUILD_TOOLS_DIR)/jq: -ifeq ($(UNAME_S),Darwin) 
- @which jq > /dev/null || brew install jq -else - @which jq > /dev/null || (echo "jq is not installed"; exit) -endif - @touch $@ - -.PHONY: python3 -python3: ${BUILD_TOOLS_DIR}/python3 | $(BUILD_TOOLS_DIR) - -${BUILD_TOOLS_DIR}/python3: -ifeq ($(UNAME_S),Darwin) - @which python3 > /dev/null || brew install python3 -else - @which python3 > /dev/null || (echo "python3 is not installed"; exit 1) -endif - @touch $@ - -.PHONY: venv -venv: $(PYTHON_VENV) - -$(PYTHON_VENV): requirements.txt - python3 -m venv $@ - . $@/bin/activate; python3 -m pip install --upgrade -r $^ - touch $@ - -.PHONY: help -help: - @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | cut -d':' -f1- | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' - -.PHONY: vars -vars: ## Display variables used for testing - @grep -E 'ifeq.*## .*$$' $(MAKEFILE_LIST) | grep -v grep | sort | sed 's/ifeq.*(//' | sed 's/).*)//' | awk 'BEGIN {FS = "## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' - -.PHONY: load-data -load-data: catalog-entities archive-entities resource-definitions ## Load data from 'data' directory into Cortex - -.PHONY: archive-entities -archive-entities: $(ARCHIVE_TARGETS) | $(BUILD_DIR) - -.PHONY: catalog-entities -catalog-entities: $(CATALOG_TARGETS) | $(BUILD_DIR) - -$(BUILD_DIR)/%.archive: $(BUILD_TOOLS_DIR)/python3 - @$(CORTEX_CLI) catalog archive -t $(notdir $(basename $@)) - @touch $@ - -$(BUILD_DIR)/%.yaml: data/catalog/%.yaml $(CUSTOM_RESOURCE_TARGETS) - $(CORTEX_CLI) catalog create -f $< - @touch $@ - -.PHONY: resource-definitions -resource-definitions: $(CUSTOM_RESOURCE_TARGETS) | $(BUILD_DIR) - -$(BUILD_DIR)/%.json: data/resource-definitions/%.json | $(BUILD_DIR) - $(CORTEX_CLI) catalog delete-by-type -t $(notdir $(basename $@)) - ($(CORTEX_CLI) resource-definitions get -t $(notdir $(basename $@)) && $(CORTEX_CLI) resource-definitions delete -t $(notdir $(basename $@)) ) || : - $(CORTEX_CLI) resource-definitions create -f $< - @touch $@ - -# 
-# This target performs token replacement of files in the feature-flags directory and checks -# if the contents of the file have changed since the last time it was built. If so, the -# feature flag is updated in the environment. -# -# This check is beneficial only in local test environments. As of now, no intent to save -# state between runs of an automated build, so all flags would need to be set each test -# cycle. -# -# If these flags can be set all at once and time isn't a concern, this target can most -# likely be removed. -# -.PHONY: feature-flags -feature-flags: feature-flags-dump $(FEATURE_FLAG_ENVSUBST) - -$(BUILD_DIR)/ff/envsubst/%: | $(BUILD_DIR)/ff/envsubst $(BUILD_DIR)/ff/source - @echo "Checking if feature flag $* needs to be updated" - @envsubst < feature-flags/$*.json > $@ - @diff $@ $(BUILD_DIR)/ff/source/$* 2> /dev/null || (. $(PYTHON_VENV)/bin/activate; python tests/feature_flag_set.py $*) - @cp $@ $(BUILD_DIR)/ff/source - @rm $@ - -test: test-api test-cli ## Run pytest for both API and CLI tests in the 'tests' directory - -test-api: feature-flags ## Run pytest for API tests in the 'tests' directory - @if [ -f .coverage ]; then rm .coverage; fi -ifeq ($(CORTEX_API_KEY_VIEWER),) ## Required; Cortex API key with Viewer permission, used in RBAC tests - $(error CORTEX_API_KEY_VIEWER is not set) -endif - -ifeq ($(GH_PAT),) ## GitHub Personal Access Token - $(error GH_PAT is not set) -endif - -ifeq ($(GH_WEBHOOK_SECRET),) ## GitHub webhook secret; defined in the Cortex GitHub configuration and used to create GitHub webhook - $(error GH_WEBHOOK_SECRET is not set) -endif - - @. 
$(PYTHON_VENV)/bin/activate; PYTHONPATH=cortexapps_cli:tests pytest -rA -n auto -m "not serial" --html=report.html --self-contained-html --cov=cortexapps_cli --cov-append --cov-report term-missing $(PYTEST_PARMS) - -test-cli: feature-flags test-api cli-tests ## Run pytest for CLI-specific tests in the 'tests' directory - -cli-tests: ## Run pytest for CLI-specific tests in the 'tests' directory - @. $(PYTHON_VENV)/bin/activate; PYTHONPATH=cortexapps_cli:tests pytest -rA -n 0 -m "serial" --cov=cortexapps_cli --cov-append --cov-report term-missing $(PYTEST_PARMS) - -test-git: feature-flags github ## Run pytest for git tests in the 'tests' directory - @. $(PYTHON_VENV)/bin/activate; PYTHONPATH=cortexapps_cli:tests pytest -k test_git - -.PHONY: clean -clean: clean-data - @rm -rf $(BUILD_DIR) - -clean-data: $(BUILD_TOOLS_DIR)/jq ${ENTITIES} - for entity in $(shell $(CORTEX_CLI) catalog list -g public-api-test | jq -r '.entities[].tag'); do \ - $(CORTEX_CLI) catalog delete -t $$entity; echo "Deleted: $$entity";\ - done - -.PHONY: feature-flags-dump -feature-flags-dump: $(FEATURE_FLAG_EXPORT) ## Dump current feature flags to $(FEATURE_FLAG_EXPORT) - -.PHONY: feature-flags-clean -feature-flags-clean: - @rm -f $(FEATURE_FLAG_EXPORT) - -$(FEATURE_FLAG_EXPORT): | $(BUILD_DIR)/ff - . $(PYTHON_VENV)/bin/activate; python3 tests/feature_flag_dump.py $@ - -.PHONY: github -github: $(BUILD_DIR)/github ## Configure Cortex GitHub integration, create GitHub webhook - -$(BUILD_DIR)/github: | $(BUILD_DIR) - . 
$(PYTHON_VENV)/bin/activate; PYTHONPATH=cortexapps_cli:tests python3 tests/github_setup.py - touch $@ - -$(BUILD_DIR): - @mkdir -p $@ - -$(BUILD_DIR)/ff: - @mkdir -p $@ - -$(BUILD_DIR)/ff/source: - @mkdir -p $@ - -$(BUILD_TOOLS_DIR): - @mkdir -p $@ - -$(BUILD_DIR)/ff/envsubst: - @mkdir -p $@ diff --git a/cortexapps_cli/.cortex.py.swp b/cortexapps_cli/.cortex.py.swp deleted file mode 100644 index 55b56103e4e5d535ffef1fe502d5447437ba2745..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 16384 zcmeHOYm6jS6>c8B5L7TxfB3lc5W6QkT|K)CY}nAsIt;R~?5wi`%diVYb=B?e+NrLp zt;fu?D@(v93sK@D{y_){CjKGDAH<+B{82-qgv9*;EFdo*ynP@(5YX@3I#u0OOpjst zVMwZzZ>GAwd(S!d+;eZ;b84nNecAR)#5vl=3O*mGRPOrrueyhCdv*Ht^D347veOC8 zo@+;u42U1|xx_F(<9SPsU4f@Lo@qE^+8xRXY^h@7$c>; zU3#GOz_EGYl*;B!8>gx7MD=v>!P721HZf(d(gURjN)MDCC_PYmp!7iLfzkt|2i}$* zi2MzeYhn5EZ2c1dKCbZlKm6(P{WQl<+r7?w#Qc#$|JRtmk?((&JEr|#W&S$me^cQ9 z&3w%KZZXn}v4 z`OBC;^F4X_e`J0S^M?!k9}4=PDDb~$ei!#Y=cK&+-!Z?N`5OxSQw9Bx6!<5ZPwn6Q z-n{%Lm`~^bh64Y1LI0x#{xRmSX8Wd3&ddKT^J)CvT;LyNKDF<;0{;l}SF!wUr{v{7 z%zSG9Z3X@)^BwO0Qh|Snd5ihQ_vPh3$b4%5&kFnl1^FL&f4)C&2NjLwA1d&CyZlvp zp!7iLfzkt|2TBi=9wzdG)Blv&|Jb--UF5p(+dcXzxz^TCTz;VE1$ccUqTnFp`&IFDFUPLZ-1h^Tv7}yM) z0ZafV1NR{pyao6g@KqoHE&@&lo<|<~W8hBUOF$3k0v7@&0Y{MsKLp$l+zxyJI0#$- z%mSwZ&mb57IuHWq11AE%ME-vh5CA)Y4*^diiTEk-4PXFJ4dMdeanv%t3tS5v02YDE zfmbkRG;hzQMU3?$X+oHu+p)W%V>?o-Oo;6rF*7&6FnhVaZP#}F;@L$Z-DNxQ+`e=p zv1|mk(RQS;Lt)8|?MiD(tk{ktTrU!4Pnt`j;{`&l+F>MJQwpym4CtW0=BQM}$R`|x z8k$A*>B;QAbV9i``$mn|YDM?`z;>g0719S@9EcgEK)__NBnQH9EfMvkD*_n>gQSV+ z_4~N5RfpeDttsmFl>Exjo6!>!Gx;oDkQM%f?l zXmEOk+KztENT*3^iG!@-n#w3o*Lx9MM(jja=9sky4M7O5XeBUw&5{RJV-Cm?V3v*A$ijF__(f(RZ}a=t$l146EK5hN4<_D#VST8){^gMpHT^YC*d; zDGV&<#J9=LqHH3&)|NaJf^}-uClf_*5SbYbd|A(0OirrVJlAxLFcdZ2_Ot~A;r6*b z2t!{6wNGJE$!#P_r}68Z!0YQY79}P15LrVX_EbkpR9DnZ{8(d%<3>kaCWPJhu^A@X z+J?Cl`@)Bfhy>cm$VKE9gSWH_b)paJ&jvVZ$FX~WBu|JL^<01yrF{kJl|>dA5=NHo z4n3s;i;NicPxcVlG@N=~4GY-bDUi@*dGGXhnn_{9Nj~|Rp>JPtYt27>zJGkVdHv3 
z7!h(T!=Ub%gxpXz4YwYmlPIXrC4JCZITlvY-qq--%`11Sq1>F8atsbwd(Oy)w=i!! zgxr8JcYHey85$W56S5l^k&S&C3s*IvW+VPceSsh=`XarR_=5KLQQ3{*Pq@FxG$cib7Xvn~Nnh?7@I-J;s;|!+g zR3uEp6&`E}um;rrKsrRSsYRYJ<1q62_UDPS!>Q~!-ZyJ0EYZzy5K6A^)jqNSdvN8d~E`ZNmZ)Hm9p~K~VXvQYh z4&n%#mKWe0wqgtfY{DVV=;8nx(P?!jPMu*K;A@A>rwiKKOh+274AL%{F~il3ldN(T z=0L2_Ns+wKGnR4Gi`#4^B;g#1?~RdrTJFKd)0o8zf5$^B*@hjvtzkDY=y{_tS+8a1 zBlXA4wPaJ1tETiLF{^%&S=by$4#Pvq{s5cSerZN~tC8n9)hqW_eUHw$bj8i8`GZr| zh0aP|-yyTcKTE4wbB!8Tab^tK)zPOZlySf%VO&?y)%JqGTcP7Eo^)BP;4iv+aa+ZT zKZtss+eoZ!@-I6KV;MF#ZP{F@BquQ#dGMM_CGnto`lr`yd-m1t%j}wufa!Ovri!uy za|eH!5|3$?7{qScDv$@DJPOnAUUv1y`Agf-!#usVz3j@Rb4`@!q%|19IKdjz?+L3; z>#GqaA>{+CrvSZ?Q&?Ucr7{zDBNsde+f>?jF0c^y*7feL!!1DhDLqhnp!7iLfzktSYY$Lb zlBEjx-#98$;Pk8ORQS+!N=|CJ-ZyMl*K3^0;E9jwNbH(P=|LS=QK#c-4i|f-C~LtX zA3w9K;SF^i)pbI(#4+tH$RJ&ikR~{kqv&)m$+DH2^|Be&AYY+`wCeoqt{vNEX7xSW z7cSHn=PsSs^Tq6Ha(bsTp~{N%NE-c6_xvcVhkkMpFOof521a1w*Vu+3PV~|=0&8pw z)b&uOHl?nr=C(S)jZ=#wX|Rqm--)}nJ601;dP_)<#`35{;lDARamsC92kWgkDBzE6 zGm~cWW4VUPVxz$+B)%9b(`eO(clfHszta-w+VY}A9!d^|OV?b} zZ`Jq$w-PIp7J@b~YD909Q2iE%n#daI#S#7+m@4#{1e7v8u`d;~uLePsWaH&j3-2+M zXV$jp09s;%2Vz^2L5cR=vF~GfhNu<V7XTYXtq?>!31Z|!FTaL`<&;=J zy%NRiJc#l#{;eHV`4*O+3RP|1 zSfLjP7h;j37>4(7Xfu_Gwq$4qwoe{u)%Xn%SE&T5ZYVThOAb0n@@vCRD0_5UTx=3} WU&K_hG~mxn_cq=GcByA#z4Bj8F&L%* diff --git a/cortexapps_cli/cli.py b/cortexapps_cli/cli.py index d5ed821..4d3e0be 100755 --- a/cortexapps_cli/cli.py +++ b/cortexapps_cli/cli.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + import typer from typing_extensions import Annotated @@ -113,7 +115,10 @@ def global_callback( if tenant not in config: raise typer.BadParameter(f"Tenant {tenant} not found in config file") api_key = config[tenant]["api_key"] - url = config[tenant]["base_url"] or url + if url not in config[tenant]: + url = url + else: + url = config[tenant]["base_url"] # strip any quotes or spaces from the api_key and url api_key = api_key.strip('"\' ') diff --git a/cortexapps_cli/command_options.py b/cortexapps_cli/command_options.py 
index 0d845e8..ca601cd 100644 --- a/cortexapps_cli/command_options.py +++ b/cortexapps_cli/command_options.py @@ -19,6 +19,10 @@ class ListCommandOptions: Optional[List[str]], typer.Option("--filter", "-F", help="Filters to apply on rows, in the format jsonpath=regex", show_default=False) ] + no_headers = Annotated[ + Optional[bool], + typer.Option("--no-headers", help="For csv output type only: don't print header columns.", show_default=False) + ] sort = Annotated[ Optional[List[str]], typer.Option("--sort", "-S", help="Sort order to apply on rows, in the format jsonpath:asc or jsonpath:desc", show_default=False) diff --git a/cortexapps_cli/commands/api_keys.py b/cortexapps_cli/commands/api_keys.py index 874c076..3fb585b 100644 --- a/cortexapps_cli/commands/api_keys.py +++ b/cortexapps_cli/commands/api_keys.py @@ -25,6 +25,7 @@ def list( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], ): diff --git a/cortexapps_cli/commands/audit_logs.py b/cortexapps_cli/commands/audit_logs.py index c6d23f8..a2ea3e9 100644 --- a/cortexapps_cli/commands/audit_logs.py +++ b/cortexapps_cli/commands/audit_logs.py @@ -45,6 +45,7 @@ def get( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], ): diff --git a/cortexapps_cli/commands/backup.py b/cortexapps_cli/commands/backup.py index 2e8f6ff..3443247 100644 --- a/cortexapps_cli/commands/backup.py +++ b/cortexapps_cli/commands/backup.py @@ -60,25 +60,7 @@ def _file_name(directory, tag, content, extension): def _write_file(content, file, is_json=False): with open(file, 'w') as f: if 
is_json: - # plugins return a dict? - - #json_data = json.loads(str(content).replace("'", '"')) # Fixing single quotes to double quotes for JSON format - - #json_data = json.loads(content) - #json.dump(json_data, f, indent=4) - - #print_json(json_data, file=f) - #print(json.dumps(json_data, indent=4), file=f) - - #print_json(data=content, file=f) print(content, file=f) - - #json.dump(content, f, indent=4) - #console.print_json(content) - #console = Console(record=True) - #console.print_json(data=content) - #f.write(console.export_text()) - #f.write(data) else: f.write(str(content) + "\n") f.close() @@ -102,7 +84,7 @@ def _catalog(ctx, directory, catalog_types): tag = tag.replace("/", "-") _file_name(directory, tag, y, "yaml") -def _entity_types(ctx, directory): +def _export_entity_types(ctx, directory): directory = _directory_name(directory, "entity-types") data = entity_types.list(ctx, include_built_in=False, page=0, page_size=250, _print=False) @@ -113,14 +95,14 @@ def _entity_types(ctx, directory): json_string = json.dumps(definition, indent=4) _file_name(directory, tag, json_string, "json") -def _ip_allowlist(ctx, directory): +def _export_ip_allowlist(ctx, directory): directory = _directory_name(directory, "ip-allowlist") - #file = directory + "/ip-allowlist.json" + file = directory + "/ip-allowlist.json" content = ip_allowlist.get(ctx, page=None, page_size=None, _print=False) _file_name(directory, "ip-allowlist", str(content), "json") -def _plugins(ctx, directory): +def _export_plugins(ctx, directory): directory = _directory_name(directory, "plugins") list = plugins.list(ctx, _print=False, include_drafts="true", page=None, page_size=None) @@ -130,7 +112,7 @@ def _plugins(ctx, directory): content = plugins.get(ctx, tag_or_id=tag, include_blob="true", _print=False) _file_name(directory, tag, content, "json") -def _scorecards(ctx, directory): +def _export_scorecards(ctx, directory): directory = _directory_name(directory, "scorecards") list = scorecards.list(ctx, 
show_drafts=True, page=None, page_size=None, _print=False) @@ -140,7 +122,7 @@ def _scorecards(ctx, directory): content = scorecards.descriptor(ctx, scorecard_tag=tag, _print=False) _file_name(directory, tag, content, "yaml") -def _workflows(ctx, directory): +def _export_workflows(ctx, directory): directory = _directory_name(directory, "workflows") list = workflows.list(ctx, _print=False, include_actions="false", page=None, page_size=None, search_query=None) @@ -170,7 +152,7 @@ def _parse_export_types(value: str) -> List[str]: for val in value: for item in val.split(","): if item not in backupTypes: - raise typer.BadParameter(item + " is not a valid type. Valid types are: " + backupString + ".") + raise typer.BadParameter(item + " is not a valid type. Valid types are: " + backupString + ".") else: types.append(item) return types @@ -208,29 +190,29 @@ def export( directory = directory + "-" + client.tenant _create_directory(directory) if "catalog" in export_types: - _catalog(ctx, directory, catalog_types) + _export_catalog(ctx, directory, catalog_types) if "entity-types" in export_types: - _entity_types(ctx, directory) + _export_entity_types(ctx, directory) if "ip-allowlist" in export_types: - _ip_allowlist(ctx, directory) + _export_ip_allowlist(ctx, directory) if "plugins" in export_types: - _plugins(ctx, directory) + _export_plugins(ctx, directory) if "scorecards" in export_types: - _scorecards(ctx, directory) + _export_scorecards(ctx, directory) if "workflows" in export_types: - _workflows(ctx, directory) + _export_workflows(ctx, directory) print("\nExport complete!") print("Contents available in " + directory) -def _import_ip_allowlist(directory): +def _import_ip_allowlist(ctx, directory): if os.path.isdir(directory): print("Processing: " + directory) for filename in os.listdir(directory): file_path = os.path.join(directory, filename) if os.path.isfile(file_path): print(" Importing: " + filename) - ip_allowlist.get(ctx, file=file_path, force=False, _print=False) 
+ ip_allowlist.replace(ctx, file_input=open(file_path), addresses=None, force=False, _print=False) def _import_entity_types(ctx, force, directory): if os.path.isdir(directory): @@ -248,7 +230,7 @@ def _import_catalog(ctx, directory): file_path = os.path.join(directory, filename) if os.path.isfile(file_path): print(" Importing: " + filename) - catalog.create(ctx, file_input=open(file_path)) + catalog.create(ctx, file_input=open(file_path), _print=False) def _import_plugins(ctx, directory): if os.path.isdir(directory): @@ -257,7 +239,7 @@ def _import_plugins(ctx, directory): file_path = os.path.join(directory, filename) if os.path.isfile(file_path): print(" Importing: " + filename) - plugins.create(ctx, file_input=open(file_path)) + plugins.create(ctx, file_input=open(file_path), force=True) def _import_scorecards(ctx, directory): if os.path.isdir(directory): @@ -289,7 +271,7 @@ def import_tenant( client = ctx.obj["client"] - _import_ip_allowlist(directory + "/ip-allowlist") + _import_ip_allowlist(ctx, directory + "/ip-allowlist") _import_entity_types(ctx, force, directory + "/entity-types") _import_catalog(ctx, directory + "/catalog") _import_plugins(ctx, directory + "/plugins") diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index cb8d9a7..b4f3ae3 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -80,6 +80,7 @@ def catalog_list( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], _print: CommandOptions._print = True, @@ -143,6 +144,7 @@ def details( tag: str = typer.Option(..., "--tag", "-t", help="The tag (x-cortex-tag) or unique, auto-generated identifier for the entity."), table_output: ListCommandOptions.table_output = False, csv_output: 
ListCommandOptions.csv_output = False, + no_headers: ListCommandOptions.no_headers = False, columns: ListCommandOptions.columns = [], filters: ListCommandOptions.filters = [], ): @@ -268,6 +270,7 @@ def create( ctx: typer.Context, file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing YAML content of entity; can be passed as stdin with -, example: -f-")] = None, dry_run: CatalogCommandOptions.dry_run = False, + _print: CommandOptions._print = True, ): """ Create entity @@ -279,7 +282,8 @@ def create( } r = client.post("api/v1/open-api", data=file_input.read(), params=params, content_type="application/openapi;charset=UTF-8") - print_output_with_context(ctx, r) + if _print: + print_output_with_context(ctx, r) @app.command() def patch( diff --git a/cortexapps_cli/commands/custom_data.py b/cortexapps_cli/commands/custom_data.py index 413b72b..dca2f4e 100644 --- a/cortexapps_cli/commands/custom_data.py +++ b/cortexapps_cli/commands/custom_data.py @@ -164,6 +164,7 @@ def list( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], ): diff --git a/cortexapps_cli/commands/custom_events.py b/cortexapps_cli/commands/custom_events.py index 890aeae..9c9b969 100644 --- a/cortexapps_cli/commands/custom_events.py +++ b/cortexapps_cli/commands/custom_events.py @@ -170,6 +170,7 @@ def list( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], ): diff --git a/cortexapps_cli/commands/entity_types.py b/cortexapps_cli/commands/entity_types.py index 51830a5..af758c9 100644 --- 
a/cortexapps_cli/commands/entity_types.py +++ b/cortexapps_cli/commands/entity_types.py @@ -21,6 +21,7 @@ def list( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], ): @@ -42,7 +43,7 @@ def list( if (table_output or csv_output) and not ctx.params.get('columns'): ctx.params['columns'] = [ "Type=type", - "Source=tag", + "Source=source", "Name=name", "Description=description", ] diff --git a/cortexapps_cli/commands/gitops_logs.py b/cortexapps_cli/commands/gitops_logs.py index acecf06..616b6d8 100644 --- a/cortexapps_cli/commands/gitops_logs.py +++ b/cortexapps_cli/commands/gitops_logs.py @@ -32,6 +32,7 @@ def get( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], ): diff --git a/cortexapps_cli/commands/initiatives.py b/cortexapps_cli/commands/initiatives.py index aed431f..6770a76 100644 --- a/cortexapps_cli/commands/initiatives.py +++ b/cortexapps_cli/commands/initiatives.py @@ -14,12 +14,72 @@ @app.command() def create( ctx: typer.Context, - input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing YAML representation of scorecard, can be passed as stdin with -, example: -f-")] = None, - dry_run: bool = typer.Option(False, "--dry-run", "-d", help="When true, this endpoint only validates the descriptor contents and returns any errors or warnings"), + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing JSON body of request, can be passed as stdin with -, example: -f-")] = None, ): """ - Create or update a Scorecard using the descriptor YAML. 
The operation is determined by the existence of a Scorecard with the same tag as passed in the descriptor. + Create an Initiative. API key must have the Edit Initiative permission. """ + +# description +# levels +# rules +# exclude-groups +# include-groups +# query +# types-include +# types-exclude +# isdraft +# name +# notification-disabled +# notification-replyto +# notification-time-interval +# notification-time-unit +# scorecard-tag +# target-date +# { +# "description": "text", +# "emphasizedLevels": [ +# { +# "rank": 1 +# } +# ], +# "emphasizedRules": [ +# { +# "expression": "text" +# } +# ], +# "filter": { +# "groups": { +# "exclude": [ +# "text" +# ], +# "include": [ +# "text" +# ] +# }, +# "query": "text", +# "types": { +# "exclude": [ +# "text" +# ], +# "include": [ +# "text" +# ] +# } +# }, +# "isDraft": true, +# "name": "text", +# "notificationSchedule": { +# "isDisabled": true, +# "replyToEmails": [ +# "text" +# ], +# "timeInterval": 1, +# "timeUnit": "text" +# }, +# "scorecardTag": "text", +# "targetDate": "2025-06-10" +# } client = ctx.obj["client"] @@ -30,7 +90,7 @@ def create( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - client.post("api/v1/scorecards/descriptor", params=params, data=input.read(), content_type="application/yaml;charset=UTF-8") + client.post("api/v1/initiatives", params=params, data=file_input.read()) @app.command() def delete( @@ -56,6 +116,7 @@ def list( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], ): @@ -81,7 +142,7 @@ def list( "Name=name", "Description=description", "TargetDate=targetDate", - "ScorecardTag=scorecardtag", + "ScorecardTag=scorecardTag", "ScorecardName=scorecardName", "IsDraft=isDraft", ] diff --git a/cortexapps_cli/commands/ip_allowlist.py b/cortexapps_cli/commands/ip_allowlist.py index b15c617..bd5a6d6 100644 ---
a/cortexapps_cli/commands/ip_allowlist.py +++ b/cortexapps_cli/commands/ip_allowlist.py @@ -1,6 +1,9 @@ import typer from typing_extensions import Annotated from cortexapps_cli.command_options import CommandOptions +from cortexapps_cli.utils import print_output_with_context, print_output, print_json +import json +import ast app = typer.Typer(help="IP Allowlist commands", no_args_is_help=True) @@ -26,22 +29,34 @@ def get( params = {k: v for k, v in params.items() if v is not None} if _print: - client.fetch_or_get("api/v1/ip-allowlist", page, _print, params=params) + r = client.fetch_or_get("api/v1/ip-allowlist", page, _print, params=params) else: - return client.fetch_or_get("api/v1/ip-allowlist", page, _print, params=params) + r = client.fetch_or_get("api/v1/ip-allowlist", page, _print, params=params) + if r is None: + data = { + "entries": [] + } + else: + data = { + "entries": r['entries'] + } + return json.dumps(data, indent=2) @app.command() def replace( ctx: typer.Context, - addresses: str = typer.Option(..., "--address", "-a", help="Comma-delimited list of IP addresses and/or IP ranges of form ipAddress[:description], for example 127.0.0.1:'my local IP'"), + addresses: str = typer.Option(None, "--address", "-a", help="Comma-delimited list of IP addresses and/or IP ranges of form ipAddress[:description], for example 127.0.0.1:'my local IP'"), file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help=" File containing custom event; can be passed as stdin with -, example: -f-")] = None, - force: bool = typer.Option(False, "--force", "-o", help="When true, entries will be updated even if the list doesn't contain the requestor's IP address") + force: bool = typer.Option(False, "--force", "-o", help="When true, entries will be updated even if the list doesn't contain the requestor's IP address"), + _print: CommandOptions._print = True, ): """ Replace existing allowlist with provided list of IP addresses & ranges """ client = ctx.obj["client"] + if not 
addresses and not file_input: + raise typer.BadParameter("One of --addresses and --file-input is required") if file_input: data = json.loads("".join([line for line in file_input])) @@ -56,7 +71,10 @@ def replace( r = client.put("api/v1/ip-allowlist", data=data, params=params) - print_json(data=r) + if _print: + print_json(data=r) + else: + return(r) @app.command() diff --git a/cortexapps_cli/commands/packages.py b/cortexapps_cli/commands/packages.py index 56a9ccd..06707cf 100644 --- a/cortexapps_cli/commands/packages.py +++ b/cortexapps_cli/commands/packages.py @@ -27,6 +27,7 @@ def list( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], ): diff --git a/cortexapps_cli/commands/plugins.py b/cortexapps_cli/commands/plugins.py index 6bafd88..8602db9 100644 --- a/cortexapps_cli/commands/plugins.py +++ b/cortexapps_cli/commands/plugins.py @@ -5,6 +5,8 @@ from typing_extensions import Annotated import json import typer +import re +from urllib.error import HTTPError app = typer.Typer( help="Plugins commands", @@ -21,6 +23,7 @@ def list( table_output: ListCommandOptions.table_output = False, csv_output: ListCommandOptions.csv_output = False, columns: ListCommandOptions.columns = [], + no_headers: ListCommandOptions.no_headers = False, filters: ListCommandOptions.filters = [], sort: ListCommandOptions.sort = [], ): @@ -37,11 +40,6 @@ def list( # remove any params that are None params = {k: v for k, v in params.items() if v is not None} - - #if _print: - # client.fetch_or_get("api/v1/plugins", page, _print, params=params) - #else: - # return client.fetch_or_get("api/v1/plugins", page, _print, params=params) if (table_output or csv_output) and not ctx.params.get('columns'): ctx.params['columns'] = [ @@ -69,7 +67,8 @@ def list( @app.command() def 
create( ctx: typer.Context, - plugin_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of plugin using schema defined at https://docs.cortex.io/docs/api/create-plugin")] = None + file_input: Annotated[typer.FileText, typer.Option("--file", "-f", help="File containing contents of plugin using schema defined at https://docs.cortex.io/docs/api/create-plugin")] = None, + force: bool = typer.Option(False, "--force", help="Recreate entity if it already exists."), ): """ Create a new plugin @@ -77,7 +76,20 @@ def create( client = ctx.obj["client"] - client.post("api/v1/plugins", data=plugin_input.read()) + data = json.loads(file_input.read()) + + if force: + plugins = list(ctx, _print=False) + plugin_tags = [plugin["tag"] for plugin in plugins["plugins"]] + + tag = data['tag'] + if tag in plugin_tags: + # Remove the 'tag' attribute if it exists + data.pop("tag", None) + r = client.put("api/v1/plugins/" + tag, data, raw_response=True) + else: + #r = client.post("api/v1/plugins", data=file_input.read(), raw_response=True) + r = client.post("api/v1/plugins", data, raw_response=True) @app.command() def delete( @@ -113,12 +125,24 @@ def get( if _print: print_json(data=r) else: - return(r) + # Optionally replace raw newlines inside known problem keys + #fixed = str(r).replace('\n', '\\n') # crude but often works + + #data = json.loads(fixed) + #return(json.dumps(data, indent=2)) + #raw_text = r.text + + # Replace unescaped newlines inside string values with escaped \n + # WARNING: This is a heuristic and assumes newlines only appear in strings + #safe_text = re.sub(r'(?": - return args.file.read() - # import_from_backup passes the file as a string - else: - with open(args.file, 'rb') as f: - return f.read() - -def read_json_from_yaml(args): - if str(type(args.file)) == "": - data = yaml.safe_load(args.file.read()) - - else: - with open(args.file.name, 'rb') as f: - data = yaml.safe_load(f) - - return json.dumps({"spec": "" + str(data) 
+ ""}) - -def check_config_file(config_file, replace_string): - if not os.path.isfile(config_file): - print("Cortex CLI config file " + config_file + " does not exist. Create (Y/N)?") - response = input() - if response == "Y" or response == "y": - if not os.path.isdir(os.path.dirname(config_file)): - os.mkdir(os.path.dirname(config_file), 0o700) - cortex_config_contents = textwrap.dedent('''\ - [default] - api_key = {replace} - base_url = https://api.getcortexapp.com - ''').format(replace=replace_string) - f = open(config_file, "w") - f.write(cortex_config_contents) - f.close() - - print("Created file: " + config_file) - print("Edit this file and replace the string '" + replace_string + "' with the contents") - print("of your Cortex API key and then retry your command.") - sys.exit(0) - else: - sys.exit(0) - -# If CORTEX_API_KEY environment variable is defined, will not check for existence of a cortex -# config file. -def get_config(config, args, argv, parser, replace_string): - if os.environ.get('CORTEX_API_KEY'): - if args.tenant: - if not args.quiet: - print("WARNING: tenant setting overidden by CORTEX_API_KEY", file=sys.stderr) - - cortex_base_url = os.environ.get('CORTEX_BASE_URL', default='https://api.getcortexapp.com') - config.update({"url": cortex_base_url}) - config.update({"api_key": os.environ.get("CORTEX_API_KEY")}) - config.update({"config_file": "ENVIRONMENT"}) - else: - check_config_file(args.config, replace_string) - - config_parser = configparser.ConfigParser() - config_parser.read(args.config) - tenant_config = config_parser[args.tenant] - api_key = tenant_config.get('api_key') - - # https://github.com/cortexapps/cli/issues/20 - # Deal with case where user may have added key with quotes. - # Don't want to do a global replace in case there is a quote in the key, so - # only remove if found at begining or end of the string. 
- api_key = api_key.lstrip('\"') - api_key = api_key.rstrip('\"') - api_key = api_key.lstrip("\'") - api_key = api_key.rstrip("\'") - - if api_key == replace_string: - print("Config file " + args.config + " has not been updated to include your Cortex API key.") - print("Add your key to the file and then retry your command.") - sys.exit(2) - config.update({"url": tenant_config.get('base_url', 'https://api.getcortexapp.com')}) - config.update({"api_key": api_key}) - config.update({"config_file": args.config}) - - config.update({"debug": args.debug}) - config.update({"noObfuscate": args.noObfuscate}) - - # args = parser.parse_args(argv) - - # return args - -def add_argument_accountId(subparser): - subparser.add_argument( - '-a', - '--accountId', - help='AWS account Id', - required=True, - default=True, - metavar='' - ) - -def add_argument_alias(subparser, help_text="The github configuration alias defined in Cortex"): - subparser.add_argument( - '-a', - '--alias', - help=help_text, - required=True, - default=True, - metavar='' - ) - -def add_argument_callee_tag(subparser, help_text='The entity tag (x-cortex-tag) that identifies the callee entity.'): - subparser.add_argument( - '-e', - '--calleeTag', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_caller_tag(subparser, help_text='The entity tag (x-cortex-tag) that identifies the caller entity.'): - subparser.add_argument( - '-r', - '--callerTag', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_departmentTag(subparser): - subparser.add_argument( - '-d', - '--departmentTag', - help='The department entity tag', - required=True, - default=True, - metavar='' - ) - -def add_argument_discovery_audit_type(subparser): - subparser.add_argument( - '-t', - '--type', - help='Filter based on type of the vent', - required=False, - choices=['NEW_REPOSITORY', 'REPOSITORY_DELETED', 'REPOSITORY_ARCHIVED', 'NEW_K8S_RESOURCE', 
'NEW_APM_RESOURCE', 'APM_RESOURCE_NOT_DETECTED', 'NEW_ECS_RESOURCE', 'ECS_RESOURCE_NOT_DETECTED', 'NEW_AWS_RESOURCE', 'AWS_RESOURCE_NOT_DETECTED', 'NEW_GOOGLE_CLOUD_RESOURCE', 'GOOGLE_CLOUD_RESOURCE_NOT_DETECTED'], - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_discovery_audit_source(subparser): - subparser.add_argument( - '-s', - '--source', - help='Filter based on integration source', - required=False, - choices=['AWS', 'AZURE_DEVOPS', 'BITBUCKET', 'DATADOG', 'DYNATRACE', 'ECS', 'GCP', 'GITHUB', 'GITLAB', 'INSTANA', 'K8S', 'LIGHTSTEP', 'LAMBDA', 'NEWRELIC', 'SERVICENOW'], - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_end_time(subparser, help_text='End time for audit log retrieve'): - subparser.add_argument( - '-e', - '--endTime', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_entity_tag(subparser, help_text='The entity tag (x-cortex-tag) that identifies the entity.', required=True): - subparser.add_argument( - '-e', - '--entityTag', - help=help_text, - required=required, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_environment(subparser, help_text='The environment name of the deployment to delete.'): - subparser.add_argument( - '-e', - '--environment', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_export_directory(subparser): - subparser.add_argument( - '-d', - '--directory', - help="Directory where export will be created; defaults to ~/.cortex/export/", - required=False, - default=os.path.expanduser('~') + '/.cortex/export/' + datetime.now().strftime("%Y-%m-%d-%H-%M-%S"), - metavar='' - ) - subparser.add_argument( - '-default-directory', - '--default-directory', - help=argparse.SUPPRESS, - required=False, - default=os.path.expanduser('~') + '/.cortex/export/' + datetime.now().strftime("%Y-%m-%d-%H-%M-%S") - ) - -def add_argument_file(subparser, help_text): - subparser.add_argument( - '-f', - 
'--file', - required=True, - help=help_text + "; can be passed as stdin with -, example: -f-", - default=argparse.SUPPRESS, - type=argparse.FileType('r'), - metavar='' - ) - -def add_argument_force(subparser, help_text='When true, overrides values that were defined in the catalog descriptor. Will be overwritten the next time the catalog descriptor is processed.'): - subparser.add_argument( - '-o', - '--force', - help=help_text, - action='store_true', - default='false' - ) - -def add_argument_groups(subparser): - subparser.add_argument( - '-g', - '--groups', - help='Filter based on groups, which correspond to the x-cortex-groups field in the Catalog Descriptor. Accepts a comma-delimited list of groups', - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_hierarchyDepth(subparser): - subparser.add_argument( - '-d', - '--hierarchy-depth', - help='Depth of the parent / children hierarchy nodes. Can be \'full\' or a valid integer', - default='full', - metavar='' - ) - -def add_argument_id(subparser, help_text='The id of the CQL query'): - subparser.add_argument( - '-i', - '--id', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_import_directory(subparser): - subparser.add_argument( - '-d', - '--directory', - help="Directory containing export contents", - required=True, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_includeDrafts(subparser, help_text='Include plugin drafts.'): - subparser.add_argument( - '-i', - '--includeDrafts', - help=help_text, - required=False, - default=True, - action='store_true' - ) - -def add_argument_includeHierarchyFields(subparser): - subparser.add_argument( - '-i', - '--includeHierarchyFields', - help='List of sub fields to include for hierarchies. 
Only supports \'groups\'', - required=False, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_includeIncoming(subparser, help_text='Including incoming dependencies.'): - subparser.add_argument( - '-i', - '--includeIncoming', - help=help_text, - required=False, - default=True, - action='store_true' - ) - -def add_argument_includeIgnored(subparser, help_text='Flag to include ignored events in result.'): - subparser.add_argument( - '-i', - '--includeIgnored', - help=help_text, - required=False, - default=False, - action='store_true' - ) - -def add_argument_includeOutgoing(subparser, help_text='Including outgoing dependencies.'): - subparser.add_argument( - '-o', - '--includeOutgoing', - help=help_text, - required=False, - default=False, - action='store_true' - ) - -def add_argument_includeTeamsWithoutMembers(subparser): - subparser.add_argument( - '-i', - '--includeTeamsWithoutMembers', - help='Include teams without members', - required=False, - default=False, - action='store_true' - ) - -def add_argument_key(subparser, help_text='Key to retrieve.'): - subparser.add_argument( - '-k', - '--key', - help=help_text, - required=True, - metavar='' - ) - -def add_argument_method(subparser, help_text='The http method type of the dependency.'): - subparser.add_argument( - '-m', - '--method', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_name(subparser, help_text='The name of the thing'): - subparser.add_argument( - '-n', - '--name', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_page(subparser, help_text='Page number to return, 0 indexed'): - subparser.add_argument( - '-p', - '--page', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_page_size(subparser, help_text='Page size for results'): - subparser.add_argument( - '-z', - '--pageSize', - help=help_text, - required=False, - 
default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_path(subparser, help_text='The path of the dependency.'): - subparser.add_argument( - '-p', - '--path', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_role(subparser): - subparser.add_argument( - '-r', - '--role', - help='AWS role', - required=True, - default=True, - metavar='' - ) - -def add_argument_scorecard_tag(subparser): - subparser.add_argument( - '-s', - '--scorecardTag', - help='Unique tag for the Scorecard', - required=True, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_sha(subparser, help_text='The sha string of the deployment to delete.'): - subparser.add_argument( - '-s', - '--sha', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_show_drafts(subparser, help_text='Include draft scorecards'): - subparser.add_argument( - '-s', - '--showDrafts', - help=help_text, - required=False, - action='store_true' - ) - -def add_argument_start_time(subparser, help_text='Start time for audit log retrieve'): - subparser.add_argument( - '-s', - '--startTime', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_tag(subparser, help_text='The entity tag (x-cortex-tag) that identifies the entity.'): - subparser.add_argument( - '-t', - '--tag', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_teamTag(subparser, help_text='The tag identifing the team.'): - subparser.add_argument( - '-t', - '--teamTag', - help=help_text, - required=True, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_timestamp(subparser, help_text='Date-time of events to include.'): - subparser.add_argument( - '-i', - '--timestamp', - help=help_text, - required=False, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_timeout(subparser, help_text='Time in seconds to wait before 
timeout.'): - subparser.add_argument( - '-x', - '--timeout', - help=help_text, - required=False, - default=120, - metavar='' - ) - -def add_argument_type(subparser, option="-t", help_text='The resource type.', required=True): - subparser.add_argument( - option, - '--type', - help=help_text, - required=required, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_types(subparser, option="-t", help_text='Comma-separated list of entity types.', required=True, default=argparse.SUPPRESS): - subparser.add_argument( - option, - '--types', - help=help_text, - required=required, - default=default, - metavar='' - ) - -def add_argument_uuid(subparser, option="-u", help_text='UUID of custom event.', required=True): - subparser.add_argument( - option, - '--uuid', - help=help_text, - required=required, - default=argparse.SUPPRESS, - metavar='' - ) - -def add_argument_wait(subparser, help_text='Wait for query to complete.'): - subparser.add_argument( - '-w', - '--wait', - help=help_text, - required=False, - default=False, - action='store_true' - ) -def add_argument_yaml(subparser, help_text='When true, returns the YAML representation of the descriptor.'): - subparser.add_argument( - '-y', - '--yaml', - help=help_text, - action='store_true', - default=False, - required=False - ) - -def debug_json(r, method): - if config['debug']: - data = {} - data['method'] = method - data['status'] = r.status_code - data['url'] = r.url - data['history'] = str(r.history) - data['response_headers'] = dict(r.headers) - data['request_headers'] = dict(r.request.headers) - data['json'] = str(r.json) - if config['noObfuscate'] != True: - data['request_headers']['Authorization'] = "Bearer " - json_data = json.dumps(data) - print(json_data, file=sys.stderr) - -def exit(r, method, expected_rc=200, err=None): - if r.status_code != expected_rc: - sys.stderr.write(r.reason + "\n") - if r.status_code == 401: - if config['config_file'] == "ENVIRONMENT": - sys.stderr.write("\nCheck value of 
environment variable CORTEX_API_KEY.\n") - else: - sys.stderr.write("\nCheck your api_key in " + config['config_file'] + ".\n") - debug_json(r, method) - if err: - print(f'{method} {r.url} => {r.status_code} {r.reason}') - if err != None: - print(err) - - if not config.get('is_importing', False) or r.status_code != 409 or r.status_code != 400: - sys.exit(r.status_code) - else: - debug_json(r, method) - print(r.text) - - -def api_key(headers): - headers.update({"Authorization": "Bearer " + config['api_key']}) - - -charset_utf8 = "charset=UTF-8" - - -def default_headers(content_type='application/json', other={}): - - if not content_type.endswith(charset_utf8): - content_type = content_type + ";" + charset_utf8 - - h = { - 'Content-Type': content_type - } - - for k, v in other.items(): - h[k] = v - - return h - -# There might be a more efficient use of the requests library to combine -# these methods into a single generic method. -def get(url, headers={}): - api_key(headers) - - err = None - details = None - try: - r = requests.get(config['url'] + url, headers=headers) - r.raise_for_status() - except requests.exceptions.RequestException as e: - err = e.response.text - if err != "": - if details in e.response.json(): - details = e.response.json()['details'] - exit(r, 'GET', err=details) - - -def put(url, headers={}, payload=""): - api_key(headers) - - err = None - try: - r = requests.put(config['url'] + url, headers=headers, data=payload) - r.raise_for_status() - except requests.exceptions.RequestException as e: - err = e.response.text - exit(r, 'PUT') - -def delete(url, headers={}, payload="", expected_rc=200): - api_key(headers) - - err = None - try: - r = requests.delete(config['url'] + url, headers=headers, data=payload) - r.raise_for_status() - except requests.exceptions.RequestException as e: - err = e.response.text - exit(r, 'DELETE', expected_rc) - -def post(url, headers={}, payload="", expected_rc=200): - api_key(headers) - - err = None - try: - r = 
requests.post(config['url'] + url, headers=headers, data=payload) - r.raise_for_status() - except requests.exceptions.RequestException as e: - err = e.response.text - exit(r, 'POST', expected_rc, err) - -# Generate HTTP API options. Everything in the Namespace argparse object is -# added to the URL with the exception of those listed in the array below. -def parse_opts(args, ignore_tags=[]): - opts = "" - - for k, v in dict(vars(args)).items(): - if k in ['tenant', 'debug', 'noObfuscate', 'func', 'config'] + ignore_tags: - continue - if len(opts) == 0: - char = "?" - else: - char = "&" - opts = opts + char + k + "=" + str(v) - - # convert python args to valid JSON - return opts.replace("True", "true").replace("False", "false") - -# Audit Logs start -def subparser_audit_logs_opts(subparsers): - p = subparsers.add_parser('audit-logs', help='audit log commands') - sp = p.add_subparsers(help='audit logs help') - - subparser_audit_logs_get(sp) - -def subparser_audit_logs_get(subparser): - sp = subparser.add_parser('get', help='retrieve audit logs') - add_argument_end_time(sp) - add_argument_start_time(sp) - add_argument_page(sp) - add_argument_page_size(sp) - sp.set_defaults(func=audit_logs_get) - -def audit_logs_get(args): - get("/api/v1/audit-logs/" + parse_opts(args)) - -# Audit Logs end - -# Backup start -def subparser_backup_opts(subparsers): - p = subparsers.add_parser('backup', help='import/export commands') - sp = p.add_subparsers(help='backup help') - - subparser_backup_export(sp) - subparser_backup_import(sp) - -def subparser_backup_export(subparser): - sp = subparser.add_parser('export', help='Export tenant') - add_argument_export_directory(sp) - add_argument_types(sp, required=False, default="ALL", help_text="Entity types to export. In addition to Cortex x-cortex-type values, this can include scorecard and ip-allowlist. 
Defaults to all types.") - add_argument_page_size(sp, help_text="Number of entities to return in each catalog API call, defaults to 50.") - sp.set_defaults(func=export) - -def export(args): - # https://github.com/cortexapps/cli/issues/21 - # Cannot add this when option is added because we don't have the tenant yet. - if args.directory == args.default_directory: - args.directory = args.directory + "-" + args.tenant - - catalog_directory = args.directory + "/catalog" - json_directory = args.directory + "/json" - scorecard_directory = args.directory + "/scorecards" - teams_directory = args.directory + "/teams" - resource_definitions_directory = args.directory + "/resource-definitions" - - directory_list = [catalog_directory, json_directory, resource_definitions_directory, scorecard_directory, teams_directory] - - for directory in directory_list: - if not os.path.exists(directory): - os.makedirs(directory, exist_ok=True) - - print("Getting resource definitions") - resource_definitions_json = json_directory + "/resource-definitions.json" - resource_definitions_output = io.StringIO() - with redirect_stdout(resource_definitions_output): - resource_definitions_list(args) - with open(resource_definitions_json, 'w') as f: - f.write(resource_definitions_output.getvalue()) - data = json.loads(resource_definitions_output.getvalue()) - - # Can't sort json keys, so need to create a list first so it can be sorted. 
- resource_types_list = [] - for t in data['definitions']: - resource_types_list.append(t['type']) - - for resource_type in sorted(resource_types_list): - print("--> " + resource_type) - resource_file = resource_definitions_directory + "/" + resource_type + ".json" - args.type = resource_type - resource_definition_output = io.StringIO() - with redirect_stdout(resource_definition_output): - resource_definitions_retrieve(args) - with open(resource_file, 'w') as f: - f.write(resource_definition_output.getvalue()) - f.close() - - print("Getting catalog entities") - this_page = 0 - total_pages = -1 - args.yaml = True - - while not this_page == total_pages: - descriptors = json_directory + "/descriptors-" + str(this_page) + ".json" - catalog_output = io.StringIO() - with redirect_stdout(catalog_output): - save_types = args.types - if args.types == "ALL": - delattr(args, 'types') - catalog_list_descriptors(args) - args.types = save_types - - with open(descriptors, 'w') as f: - f.write(catalog_output.getvalue()) - data = json.loads(catalog_output.getvalue()) - total_pages = data['totalPages'] - this_page = this_page + 1 - args.page = this_page - - for descriptor in data['descriptors']: - y = yaml.safe_load(str(descriptor)) - tag = y['info']['x-cortex-tag'] - # Slash will be interpreted as a sub-directory - output_tag = tag.replace("/", "-") - print("--> " + tag) - f1 = open(catalog_directory + "/" + output_tag + ".yaml", 'w') - f1.write(str(descriptor) + "\n") - - # Remove page from args so we don't paginate subsequent calls. 
- delattr(args, 'page') - - if any(export_type == "ALL" or export_type == "ip-allowlist" for export_type in args.types.split()): - print("Getting IP Allowlist definitions") - ip_allowlist_json = json_directory + "/ip-allowlist.json" - ip_allowlist_output = io.StringIO() - with redirect_stdout(ip_allowlist_output): - ip_allowlist_get(args) - with open(ip_allowlist_json, 'w') as f: - f.write(ip_allowlist_output.getvalue()) - - if any(export_type == "ALL" or export_type == "scorecard" for export_type in args.types.split()): - print("Getting scorecards") - scorecards_json = json_directory + "/scorecards.json" - scorecards_output = io.StringIO() - with redirect_stdout(scorecards_output): - scorecards_list(args) - with open(scorecards_json, 'w') as f: - f.write(scorecards_output.getvalue()) - - data = json.loads(scorecards_output.getvalue()) - - # Can't sort json keys, so need to create a list first so it can be sorted. - scorecard_list = [] - for scorecard in data['scorecards']: - scorecard_list.append(scorecard['tag']) - - for tag in sorted(scorecard_list): - print("--> " + tag) - scorecard_file = scorecard_directory + "/" + tag + ".yaml" - args.tag = tag - scorecards_descriptor_output = io.StringIO() - with redirect_stdout(scorecards_descriptor_output): - scorecards_descriptor(args) - with open(scorecard_file, 'w') as f: - f.write(scorecards_descriptor_output.getvalue()) - delattr(args, 'tag') - - # CORTEX teams; will not try to import IDP-backed teams. Those would get re-imported after re-establishing the IDP integration. - # 2024-05-06 Will already have teams as entitites from catalog export, so should be no need to also export teams. 
-# print("Getting teams") -# teams_json=json_directory + "/teams.json" -# teams_output = io.StringIO() -# with redirect_stdout(teams_output): -# teams_list(args) -# with open(teams_json, 'w') as f: -# f.write(teams_output.getvalue()) -# -# data = json.loads(teams_output.getvalue()) -# -# # Can't sort json keys, so need to create a list first so it can be sorted. -# # Has to be a dictionary because we also need to know about the type of team. -# team_list = dict() -# for team in data['teams']: -# team_list[team['teamTag']] = team['type'] -# -# for team_tag, type in OrderedDict(sorted(team_list.items())).items(): -# if type != "CORTEX": -# continue -# print("--> " + team_tag) -# output_tag = team_tag.replace("/", "-") -# team_file=teams_directory + "/" + output_tag + ".json" -# args.teamTag=team_tag -# team_output = io.StringIO() -# with redirect_stdout(team_output): -# teams_get(args) -# with open(team_file, 'w') as f: -# f.write(team_output.getvalue()) - - print("\nExport complete!") - print("Contents available in " + args.directory) - -def subparser_backup_import(subparser): - sp = subparser.add_parser('import', help='Import contents of an export directory') - add_argument_import_directory(sp) - sp.set_defaults(func=import_from_export) - -def import_from_export(args): - catalog_directory = args.directory + "/catalog" - json_directory = args.directory + "/json" - scorecard_directory = args.directory + "/scorecards" - teams_directory = args.directory + "/teams" - resource_definitions_directory = args.directory + "/resource-definitions" - config.update({"is_importing": True}) - - # Default behavior is to output to terminal, so redirect stdout so it - # can be captured as a string. 
- resource_definitions_output = io.StringIO() - with redirect_stdout(resource_definitions_output): - resource_definitions_list(args) - resource_definitions_json = json.loads(resource_definitions_output.getvalue()) - - print("Importing resource definitions") - for file in sorted(os.listdir(resource_definitions_directory)): - print("--> " + file) - args.file = resource_definitions_directory + "/" + file - definition_type = file.replace(".json", "") - if any(definition['type'] == definition_type for definition in resource_definitions_json['definitions']): - args.type = definition_type - resource_definitions_delete(args) - resource_definitions_create(args) - - print("Importing catalog entities") - for file in sorted(os.listdir(catalog_directory)): - print("--> " + file) - args.file = catalog_directory + "/" + file - catalog_create_or_update(args) - - print("Importing IP Allowlist definitions") - args.file = json_directory + "/ip-allowlist.json" - if os.path.exists(args.file): - ip_allowlist_replace(args) - - print("Importing scorecards") - for file in sorted(os.listdir(scorecard_directory)): - print("--> " + file) - args.file = scorecard_directory + "/" + file - scorecards_create_or_update(args) - - # Default behavior is to output to terminal, so redirect stdout so it - # can be captured as a string. 
- teams_output = io.StringIO() - with redirect_stdout(teams_output): - teams_list(args) - teams_json = json.loads(teams_output.getvalue()) - - print("Importing teams") - for file in sorted(os.listdir(teams_directory)): - print("--> " + file) - args.file = teams_directory + "/" + file - team = file.replace(".json", "") - print("team = " + team) - print("teams_json = " + str(teams_json)) - if any(teamTag['teamTag'] == team for teamTag in teams_json['teams']): - print("deleting team: " + team) - args.teamTag = team - teams_delete(args) - teams_create(args) - - print("\nImport complete!") -# Backup end - -# Catalog start -def subparser_catalog_opts(subparsers): - p = subparsers.add_parser('catalog', help='catalog commands') - sp = p.add_subparsers(help='catalog help') - - subparser_catalog_archive(sp) - subparser_catalog_create_or_update(sp) - subparser_catalog_delete(sp) - subparser_catalog_delete_by_type(sp) - subparser_catalog_descriptor(sp) - subparser_catalog_details(sp) - subparser_catalog_gitops_logs(sp) - subparser_catalog_list(sp) - subparser_catalog_list_descriptors(sp) - subparser_catalog_scorecard_scores(sp) - subparser_catalog_unarchive(sp) - -def subparser_catalog_archive(subparser): - sp = subparser.add_parser('archive', help='archive an entity') - add_argument_tag(sp) - sp.set_defaults(func=catalog_archive) - -def catalog_archive(args): - put("/api/v1/catalog/" + args.tag + "/archive") - -def subparser_catalog_create_or_update(subparser): - sp = subparser.add_parser( - 'create', - help='Create a catalog entity using a descriptor YAML. 
If the YAML refers to an entity that already exists (as referenced by the x-cortex-tag), this API will update the existing entity.', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Additional documentation - ------------------------ - https://docs.cortex.io/docs/reference/basics/entities - https://docs.cortex.io/docs/reference/basics/entities#example-cortexyaml-for-service-entity contains sample entities - ''')) - add_argument_file(sp, 'File containing openapi descriptor for entity') - sp.add_argument( - '-d', - '--dry-run', - help='When true, this endpoint only validates the descriptor contents and returns any errors or warnings.', - action='store_true', - default='false' - ) - sp.add_argument( - '-g', - '--github-pr', - help='Add a comment with validation errors on the pull request with the given ID', - default=argparse.SUPPRESS, - metavar='' - ) - sp.set_defaults(func=catalog_create_or_update) - -def catalog_create_or_update(args): - post( - "/api/v1/open-api" + parse_opts(args), - default_headers('application/openapi'), read_file(args) - ) - -def subparser_catalog_delete(subparser): - sp = subparser.add_parser('delete', help='delete entity') - add_argument_tag(sp) - sp.set_defaults(func=catalog_delete) - -def catalog_delete(args): - delete("/api/v1/catalog/" + args.tag) - -def subparser_catalog_delete_by_type(subparser): - sp = subparser.add_parser('delete-by-type', help='Note: Dangerous operation that will delete all entities that are of the given type') - add_argument_types(sp) - sp.set_defaults(func=catalog_delete_by_type) - -def catalog_delete_by_type(args): - delete("/api/v1/catalog" + parse_opts(args)) - -def subparser_catalog_gitops_logs(subparser): - sp = subparser.add_parser('gitops-logs', help='Retrieve most recent GitOps log for entity') - add_argument_tag(sp) - sp.set_defaults(func=catalog_gitops_logs) - -def catalog_gitops_logs(args): - get("/api/v1/catalog/" + args.tag + "/gitops-logs") - -def 
subparser_catalog_list(subparser): - sp = subparser.add_parser( - 'list', - help='List all entities across the Service, Resource and Domain Catalogs.\n This API returns summary data for each entity, so refer to the retrieve entity method to lookup more details for a single entity.' - ) - sp.add_argument( - '-a', - '--includeArchived', - help='Whether to include archived entities in the response, default to false', - default=False, - action='store_true', - required=False - ) - add_argument_hierarchyDepth(sp) - add_argument_groups(sp) - add_argument_includeHierarchyFields(sp) - sp.add_argument( - '-in', - '--includeNestedFields', - help='List of sub fields to include for different types, for example team:members', - default=argparse.SUPPRESS, - metavar='', - required=False - ) - sp.add_argument( - '-io', - '--includeOwners', - help='Whether to include ownership information for each entity in the response', - default=False, - action='store_true', - required=False - ) - sp.add_argument( - '-l', - '--includeLinks', - help='Whether to include links for each entity in the response', - default=False, - action='store_true', - required=False - ) - sp.add_argument( - '-m', - '--includeMetadata', - help='Whether to include custom data for each entity in the response', - default=False, - action='store_true', - required=False - ) - sp.add_argument( - '-o', - '--owners', - help='Filter based on owner group names, which correspond to the x-cortex-owners field in the Catalog Descriptor. Accepts a comma-delimited list of owner group names', - default=argparse.SUPPRESS, - metavar='' - ) - add_argument_page(sp) - sp.add_argument( - '-r', - '--gitRepositories', - help='Supports only GitHub repositories in the org/repo format', - default=argparse.SUPPRESS, - metavar='' - ) - sp.add_argument( - '-t', - '--types', - help='Filter the response to specific types of entities. By default, this includes services, resources, and domains. 
Corresponds to the x-cortex-type field in the Entity Descriptor.', - default=argparse.SUPPRESS, - metavar='' - ) - add_argument_page_size(sp) - sp.set_defaults(func=catalog_list) - -def catalog_list(args): - get("/api/v1/catalog" + parse_opts(args)) - -def subparser_catalog_list_descriptors(subparser): - sp = subparser.add_parser('list-descriptors', help='List entity descriptors') - add_argument_page_size(sp) - add_argument_page(sp) - add_argument_types(sp, required=False) - add_argument_yaml(sp, help_text="When true, returns the YAML representation of the descriptors.") - sp.set_defaults(func=catalog_list_descriptors) - -def catalog_list_descriptors(args): - get("/api/v1/catalog/descriptors" + parse_opts(args)) - -def subparser_catalog_descriptor(subparser): - sp = subparser.add_parser('descriptor', help='Retrieve entity descriptor') - add_argument_tag(sp) - add_argument_yaml(sp) - sp.set_defaults(func=catalog_descriptor) - -def catalog_descriptor(args): - get("/api/v1/catalog/" + args.tag + "/openapi" + parse_opts(args)) - -def subparser_catalog_details(subparser): - sp = subparser.add_parser('details', help='Retrieve entity details') - add_argument_includeHierarchyFields(sp) - add_argument_hierarchyDepth(sp) - add_argument_tag(sp) - sp.set_defaults(func=catalog_details) - -def catalog_details(args): - get("/api/v1/catalog/" + args.tag + parse_opts(args)) - -def subparser_catalog_scorecard_scores(subparser): - sp = subparser.add_parser('scorecard-scores', help='Retrieve entity Scorecard scores') - add_argument_tag(sp) - sp.set_defaults(func=catalog_scorecard_scores) - -def catalog_scorecard_scores(args): - get("/api/v1/catalog/" + args.tag + "/scorecards") - -def subparser_catalog_unarchive(subparser): - sp = subparser.add_parser('unarchive', help='unarchive an entity') - add_argument_tag(sp) - sp.set_defaults(func=catalog_unarchive) - -def catalog_unarchive(args): - put("/api/v1/catalog/" + args.tag + "/unarchive") -# Catalog end - -# Custom Data start -def 
subparser_custom_data_opts(subparsers): - p = subparsers.add_parser('custom-data', help='custom_data actions') - sp = p.add_subparsers(help='custom_data help') - - subparser_custom_data_add(sp) - subparser_custom_data_bulk(sp) - subparser_custom_data_delete(sp) - subparser_custom_data_get(sp) - subparser_custom_data_list(sp) - -def subparser_custom_data_add(subparser): - sp = subparser.add_parser('add', help='Add custom data for entity', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "description": "string", - "key": "my-key", - "value": { - "nested": { - "objects": "are ok" - } - } - } - - Examples: - --------- - Single value: - { - "description": "A field to store CI/CD tool", - "key": "ci-cd-tool", - "value": "Jenkins" - } - } - - Nested values: - { - "description": "Custom field to store build metrics", - "key": "build-metrics", - "value": { - "2023-08-01": { - "success-rate": "50" - }, - "2023-08-02": { - "success-rate": "67" - } - } - } - ''')) - add_argument_tag(sp) - add_argument_file(sp, 'File containing keys to update') - add_argument_force(sp) - sp.set_defaults(func=custom_data_add) - -def custom_data_add(args): - post("/api/v1/catalog/" + args.tag + "/custom-data", default_headers(), read_file(args)) - -def subparser_custom_data_bulk(subparser): - sp = subparser.add_parser('bulk', help='Add multiple key/values of custom data to multiple entities') - add_argument_file(sp, 'File containing keys to update') - add_argument_force(sp) - sp.set_defaults(func=custom_data_bulk) - -def custom_data_bulk(args): - put("/api/v1/catalog/custom-data", default_headers(), read_file(args)) - -def subparser_custom_data_delete(subparser): - sp = subparser.add_parser('delete', help='Delete custom data for entity') - add_argument_tag(sp) - add_argument_key(sp, 'Key to delete') - sp.set_defaults(func=custom_data_delete) - -def 
custom_data_delete(args): - delete("/api/v1/catalog/" + args.tag + "/custom-data" + parse_opts(args)) - -def subparser_custom_data_list(subparser): - sp = subparser.add_parser('list', help='List custom data for entity') - add_argument_tag(sp) - sp.set_defaults(func=custom_data_list) - -def custom_data_list(args): - get("/api/v1/catalog/" + args.tag + "/custom-data") - -def subparser_custom_data_get(subparser): - sp = subparser.add_parser('get', help='Get custom data for entity by key') - add_argument_tag(sp) - add_argument_key(sp) - sp.set_defaults(func=custom_data_get) - -def custom_data_get(args): - get("/api/v1/catalog/" + args.tag + "/custom-data/" + args.key) -# Custom Data end - -# Custom Events start -def subparser_custom_events_opts(subparsers): - p = subparsers.add_parser('custom-events', help='custom events actions') - sp = p.add_subparsers(help='custom_events help') - - subparser_custom_events_create(sp) - subparser_custom_events_delete_all(sp) - subparser_custom_events_list(sp) - subparser_custom_events_delete_by_uuid(sp) - subparser_custom_events_get_by_uuid(sp) - subparser_custom_events_update_by_uuid(sp) - -def subparser_custom_events_create(subparser): - sp = subparser.add_parser('create', help='Create custom event', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "customData": {}, - "description": "string", - "timestamp": "2023-10-13T13:27:51.226Z", - "title": "Created K8s pod", - "type": "POD_CREATION" - } - - Example: - --------- - { - "customData": {}, - "description": "string", - "timestamp": "2023-10-13T13:27:51.226Z", - "title": "Created K8s pod", - "type": "POD_CREATION" - } - ''')) - add_argument_tag(sp) - add_argument_file(sp, 'File containing custom event to create') - sp.set_defaults(func=custom_events_create) - -def custom_events_create(args): - post("/api/v1/catalog/" + args.tag + "/custom-events", 
default_headers(), read_file(args)) - -def subparser_custom_events_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all custom events for an entity') - add_argument_tag(sp) - add_argument_type(sp, option='-y', help_text='The custom event type, defaults to all types', required=False) - add_argument_timestamp(sp) - sp.set_defaults(func=custom_events_delete_all) - -def custom_events_delete_all(args): - delete("/api/v1/catalog/" + args.tag + "/custom-events" + parse_opts(args), expected_rc=204) - -def subparser_custom_events_list(subparser): - sp = subparser.add_parser('list', help='List custom events for entity') - add_argument_tag(sp) - add_argument_type(sp, option='-y', help_text='The custom event type, defaults to all types', required=False) - add_argument_timestamp(sp) - sp.set_defaults(func=custom_events_list) - -def custom_events_list(args): - get("/api/v1/catalog/" + args.tag + "/custom-events" + parse_opts(args)) - -def subparser_custom_events_delete_by_uuid(subparser): - sp = subparser.add_parser('delete-by-uuid', help='Delete custom events by UUID') - add_argument_tag(sp) - add_argument_uuid(sp) - sp.set_defaults(func=custom_events_delete_by_uuid) - -def custom_events_delete_by_uuid(args): - delete("/api/v1/catalog/" + args.tag + "/custom-events/" + args.uuid, expected_rc=204) - -def subparser_custom_events_get_by_uuid(subparser): - sp = subparser.add_parser('get-by-uuid', help='Get custom event by UUID') - add_argument_tag(sp) - add_argument_uuid(sp) - sp.set_defaults(func=custom_events_get_by_uuid) - -def custom_events_get_by_uuid(args): - get("/api/v1/catalog/" + args.tag + "/custom-events/" + args.uuid) - -def subparser_custom_events_update_by_uuid(subparser): - sp = subparser.add_parser('update-by-uuid', help='Update custom event by UUID') - add_argument_tag(sp) - add_argument_uuid(sp) - add_argument_file(sp, 'File containing custom event to create') - sp.set_defaults(func=custom_events_update_by_uuid) - -def 
custom_events_update_by_uuid(args): - put("/api/v1/catalog/" + args.tag + "/custom-events/" + args.uuid, default_headers(), read_file(args)) -# Custom Events end - -# Groups start -def subparser_groups_opts(subparsers): - p = subparsers.add_parser('groups', help='groups commands') - sp = p.add_subparsers(help='groups subcommand help') - subparser_groups_add(sp) - subparser_groups_delete(sp) - subparser_groups_get(sp) - -def subparser_groups_add(subparser): - sp = subparser.add_parser('add', help='Add groups to entity') - add_argument_tag(sp) - add_argument_file(sp, 'File containing JSON array of groups to add') - sp.set_defaults(func=groups_add) - -def groups_add(args): - put("/api/v1/catalog/" + args.tag + "/groups", default_headers(), payload=read_file(args)) - -def subparser_groups_delete(subparser): - sp = subparser.add_parser('delete', help='Delete group from entity') - add_argument_tag(sp) - add_argument_file(sp, 'File containing JSON array of groups to delete') - sp.set_defaults(func=groups_delete) - -def groups_delete(args): - headers = default_headers() - delete("/api/v1/catalog/" + args.tag + "/groups", headers, read_file(args)) - -def subparser_groups_get(subparser): - sp = subparser.add_parser('get', help='Get groups for entity') - add_argument_tag(sp) - sp.set_defaults(func=groups_get) - -def groups_get(args): - get("/api/v1/catalog/" + args.tag + "/groups") -# Groups end - -# Dependencies start -def subparser_dependencies_opts(subparsers): - p = subparsers.add_parser('dependencies', help='dependencies commands') - sp = p.add_subparsers(help='dependencies help') - - subparser_dependencies_add(sp) - subparser_dependencies_add_in_bulk(sp) - subparser_dependencies_delete(sp) - subparser_dependencies_delete_all(sp) - subparser_dependencies_delete_in_bulk(sp) - subparser_dependencies_get(sp) - subparser_dependencies_get_all(sp) - subparser_dependencies_update(sp) - -def subparser_dependencies_add(subparser): - sp = subparser.add_parser('add', - help='Create 
dependency from an entity', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "description": "This is a description of the dependency", - "metadata": { - "someField": "someField data", - "someField1": "someField1 data" - } - ''')) - add_argument_caller_tag(sp) - add_argument_callee_tag(sp) - add_argument_method(sp) - add_argument_path(sp) - add_argument_file(sp, 'File containing JSON-formatted description and metadata') - sp.set_defaults(func=dependencies_add) - -def dependencies_add(args): - headers = default_headers() - post("/api/v1/catalog/" + args.callerTag + "/dependencies/" + args.calleeTag, headers, payload=read_file(args), expected_rc=201) - -def subparser_dependencies_add_in_bulk(subparser): - sp = subparser.add_parser('add-in-bulk', - help='Create or update dependencies in bulk', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "values": { - "dependency-service": [ - { - "description": "dependency description", - "metadata": { - "someField": "someField data", - "someField1": "someField1 data" - }, - "method": "GET", - "path": "/2.0/users/{username}", - "tag": "test-service" - } - ] - } - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted array of dependencies') - sp.set_defaults(func=dependencies_add_in_bulk) - -def dependencies_add_in_bulk(args): - headers = default_headers() - put("/api/v1/catalog/dependencies", headers, payload=read_file(args)) - -def subparser_dependencies_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a dependency from an entity') - add_argument_caller_tag(sp) - add_argument_callee_tag(sp) - add_argument_method(sp) - add_argument_path(sp) - sp.set_defaults(func=dependencies_delete) - -def dependencies_delete(args): - 
delete("/api/v1/catalog/" + args.callerTag + "/dependencies/" + args.calleeTag, expected_rc=204) - -def subparser_dependencies_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Deletes any outgoing dependencies from the entity. Note: this only deletes dependencies that were created via the API.') - add_argument_caller_tag(sp) - sp.set_defaults(func=dependencies_delete_all) - -def dependencies_delete_all(args): - delete("/api/v1/catalog/" + args.callerTag + "/dependencies", expected_rc=204) - -def subparser_dependencies_delete_in_bulk(subparser): - sp = subparser.add_parser('delete-in-bulk', - help='Delete dependencies in bulk', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "values": { - "dependency-service": [ - { - "description": "dependency description", - "metadata": { - "someField": "someField data", - "someField1": "someField1 data" - }, - "method": "GET", - "path": "/2.0/users/{username}", - "tag": "test-service" - } - ] - } - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted array of dependencies') - sp.set_defaults(func=dependencies_delete_in_bulk) - -def dependencies_delete_in_bulk(args): - headers = default_headers() - delete("/api/v1/catalog/dependencies", headers, payload=read_file(args), expected_rc=204) - -def subparser_dependencies_get(subparser): - sp = subparser.add_parser('get', help='Retrieve dependency between entities') - add_argument_caller_tag(sp) - add_argument_callee_tag(sp) - add_argument_method(sp) - add_argument_path(sp) - sp.set_defaults(func=dependencies_get) - -def dependencies_get(args): - get("/api/v1/catalog/" + args.callerTag + "/dependencies/" + args.calleeTag + parse_opts(args)) - -def subparser_dependencies_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all dependencies for an entity') - add_argument_caller_tag(sp) - 
add_argument_includeOutgoing(sp) - add_argument_includeIncoming(sp) - sp.set_defaults(func=dependencies_get_all) - -def dependencies_get_all(args): - get("/api/v1/catalog/" + args.callerTag + "/dependencies" + parse_opts(args)) - -def subparser_dependencies_update(subparser): - sp = subparser.add_parser('update', help='Update dependency between entities') - add_argument_caller_tag(sp) - add_argument_callee_tag(sp) - add_argument_method(sp) - add_argument_path(sp) - add_argument_file(sp, 'File containing JSON-formatted description and metadata') - sp.set_defaults(func=dependencies_update) - -def dependencies_update(args): - headers = default_headers() - put("/api/v1/catalog/" + args.callerTag + "/dependencies/" + args.calleeTag + parse_opts(args), headers, payload=read_file(args)) -# Dependencies end - -# Deploys start -def subparser_deploys_opts(subparsers): - p = subparsers.add_parser('deploys', help='deploys commands') - sp = p.add_subparsers(help='deploys help') - - subparser_deploys_add(sp) - subparser_deploys_delete(sp) - subparser_deploys_delete_all(sp) - subparser_deploys_delete_by_uuid(sp) - subparser_deploys_delete_filter(sp) - subparser_deploys_list(sp) - subparser_deploys_update_by_uuid(sp) - -def subparser_deploys_add(subparser): - sp = subparser.add_parser('add', help='Add a deployment to an entity') - add_argument_tag(sp) - add_argument_file(sp, 'File containing JSON-formatted deployment details') - sp.set_defaults(func=deploys_add) - -def deploys_add(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/deploys", headers, payload=read_file(args)) - -def subparser_deploys_list(subparser): - sp = subparser.add_parser('list', help='List deployments for an entity') - add_argument_tag(sp) - sp.set_defaults(func=deploys_list) - -def deploys_list(args): - get("/api/v1/catalog/" + args.tag + "/deploys") - -def subparser_deploys_delete(subparser): - sp = subparser.add_parser('delete', help='Delete deployments for an entity') - 
add_argument_tag(sp) - add_argument_environment(sp) - add_argument_sha(sp) - add_argument_type(sp, option="-y", help_text="Deployment type to delete", required=False) - sp.set_defaults(func=deploys_delete) - -def deploys_delete(args): - delete("/api/v1/catalog/" + args.tag + "/deploys" + parse_opts(args)) - -def subparser_deploys_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all deployments for all entities') - sp.set_defaults(func=deploys_delete_all) - -def deploys_delete_all(args): - delete("/api/v1/catalog/deploys/all") - -def subparser_deploys_delete_by_uuid(subparser): - sp = subparser.add_parser('delete-by-uuid', help='Delete deployment by UUID') - add_argument_tag(sp) - add_argument_uuid(sp, help_text="UUID of deploy to delete") - sp.set_defaults(func=deploys_delete_by_uuid) - -def deploys_delete_by_uuid(args): - delete("/api/v1/catalog/" + args.tag + "/deploys/" + args.uuid) - -def subparser_deploys_delete_filter(subparser): - sp = subparser.add_parser('delete-filter', help='Delete deployments for all entities based on a filter') - add_argument_environment(sp) - add_argument_sha(sp) - add_argument_type(sp, option="-y", help_text="Deployment type to delete", required=False) - sp.set_defaults(func=deploys_delete_filter) - -def deploys_delete_filter(args): - delete("/api/v1/catalog/deploys" + parse_opts(args)) - -def subparser_deploys_update_by_uuid(subparser): - sp = subparser.add_parser('update-by-uuid', help='Update deployment by UUID') - add_argument_file(sp, 'File containing JSON-formatted deployment details') - add_argument_tag(sp) - add_argument_uuid(sp, help_text="UUID of deploy to update") - sp.set_defaults(func=deploys_update_by_uuid) - -def deploys_update_by_uuid(args): - headers = default_headers() - put("/api/v1/catalog/" + args.tag + "/deploys/" + args.uuid, headers, payload=read_file(args)) -# Deploys end - -# Discovery Audit start -def subparser_discovery_audit_opts(subparsers): - p = 
subparsers.add_parser('discovery-audit', help='Discovery Audit commands') - sp = p.add_subparsers(help='discovery audit help') - - subparser_discovery_audit_get(sp) - -def subparser_discovery_audit_get(subparser): - sp = subparser.add_parser('get', - help="This report shows you recent changes in your environment that aren't reflected in Cortex, including newly created repositories, services, and resources that we discover from your integrations or which were deleted in the environment but corresponding Cortex entities are still present.Add a deployment to an entity", - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Possible values for source, type: - --------------------------------- - source: - - AWS - - AZURE_DEVOPS - - BITBUCKET - - DATADOG - - DYNATRACE - - ECS - - GCP - - GITHUB - - GITLAB - - INSTANA - - K8S - - LIGHTSTEP - - LAMBDA - - NEWRELIC - - SERVICENOW - - type: - - APM_RESOURCE_NOT_DETECTED - - AWS_RESOURCE_NOT_DETECTED - - ECS_RESOURCE_NOT_DETECTED - - GOOGLE_CLOUD_RESOURCE_NOT_DETECTED - - NEW_APM_RESOURCE - - NEW_AWS_RESOURCE - - NEW_ECS_RESOURCE - - NEW_GOOGLE_CLOUD_RESOURCE - - NEW_K8S_RESOURCE - - NEW_REPOSITORY - - REPOSITORY_ARCHIVED - - REPOSITORY_DELETED - ''')) - add_argument_discovery_audit_source(sp) - add_argument_discovery_audit_type(sp) - add_argument_includeIgnored(sp) - sp.set_defaults(func=discovery_audit_get) - -def discovery_audit_get(args): - get("/api/v1/discovery-audit" + parse_opts(args)) -# Discovery Audit end - -# Docs Start -def subparser_docs_opts(subparsers): - p = subparsers.add_parser('docs', help='OpenAPI doc commands') - sp = p.add_subparsers(help='docs subcommand help') - - subparser_docs_delete(sp) - subparser_docs_retrieve(sp) - subparser_docs_update(sp) - -def subparser_docs_delete(subparser): - sp = subparser.add_parser('delete', help='Delete OpenAPI docs for entity') - add_argument_tag(sp) - sp.set_defaults(func=docs_delete) - -def docs_delete(args): - delete("/api/v1/catalog/" + 
args.tag + "/documentation/openapi", expected_rc=204) - -def subparser_docs_retrieve(subparser): - sp = subparser.add_parser('get', help='Retrieve OpenAPI docs for entity') - add_argument_tag(sp) - sp.set_defaults(func=docs_retrieve) - -def docs_retrieve(args): - get("/api/v1/catalog/" + args.tag + "/documentation/openapi") - -def subparser_docs_update(subparser): - sp = subparser.add_parser('update', help='Update OpenAPI docs for entity') - add_argument_tag(sp) - add_argument_file(sp, 'File containing stringified JSON representation of the OpenAPI spec') - sp.set_defaults(func=docs_update) - -def docs_update(args): - headers = default_headers() - put("/api/v1/catalog/" + args.tag + "/documentation/openapi", headers, payload=read_json_from_yaml(args)) -# Docs End - -# Gitops Logs Start -def subparser_gitops_logs_opts(subparsers): - p = subparsers.add_parser('gitops-logs', help='Gitops logs commands') - sp = p.add_subparsers(help='gitops-logs subcommand help') - - subparser_gitops_logs_get(sp) - -def subparser_gitops_logs_get(subparser): - sp = subparser.add_parser('get', help='Retrieve GitOps logs') - add_argument_page(sp) - add_argument_page_size(sp) - sp.set_defaults(func=gitops_logs_get) - -def gitops_logs_get(args): - get("/api/v1/gitops-logs/" + parse_opts(args)) -# Gitops Logs End - -# Groups start -def subparser_groups_opts(subparsers): - p = subparsers.add_parser('groups', help='groups commands') - sp = p.add_subparsers(help='groups subcommand help') - subparser_groups_add(sp) - subparser_groups_delete(sp) - subparser_groups_get(sp) - -def subparser_groups_add(subparser): - sp = subparser.add_parser('add', help='Add groups to entity') - add_argument_tag(sp) - add_argument_file(sp, 'File containing JSON array of groups to add') - sp.set_defaults(func=groups_add) - -def groups_add(args): - headers = default_headers() - put("/api/v1/catalog/" + args.tag + "/groups", headers, payload=read_file(args)) - -def subparser_groups_delete(subparser): - sp = 
subparser.add_parser('delete', help='Delete group from entity') - add_argument_tag(sp) - add_argument_file(sp, 'File containing JSON array of groups to delete') - sp.set_defaults(func=groups_delete) - -def groups_delete(args): - headers = default_headers() - delete("/api/v1/catalog/" + args.tag + "/groups", headers, read_file(args)) - -def subparser_groups_get(subparser): - sp = subparser.add_parser('get', help='Get groups for entity') - add_argument_tag(sp) - sp.set_defaults(func=groups_get) - -def groups_get(args): - get("/api/v1/catalog/" + args.tag + "/groups") -# Groups end - -# Integrations start -def subparser_integrations_opts(subparsers): - p = subparsers.add_parser('integrations', help='integrations sub-commands') - sp = p.add_subparsers(help='integrations subcommand help') - - ssp = sp.add_parser('aws', help='AWS integration') - subparser_integrations_aws_opts(ssp) - - ssp = sp.add_parser('azure-resources', help='Azure resources integration') - subparser_integrations_azure_resources_opts(ssp) - - ssp = sp.add_parser('coralogix', help='Coralogix integration') - subparser_integrations_coralogix_opts(ssp) - - ssp = sp.add_parser('datadog', help='Datadog integration') - subparser_integrations_datadog_opts(ssp) - - ssp = sp.add_parser('github', help='GitHub integration') - subparser_integrations_github_opts(ssp) - - ssp = sp.add_parser('gitlab', help='GitLab integration') - subparser_integrations_gitlab_opts(ssp) - - ssp = sp.add_parser('incidentio', help='Incident.io integration') - subparser_integrations_incidentio_opts(ssp) - - ssp = sp.add_parser('launchdarkly', help='Launchdarkly integration') - subparser_integrations_launchdarkly_opts(ssp) - - ssp = sp.add_parser('newrelic', help='Newrelic integration') - subparser_integrations_newrelic_opts(ssp) - - ssp = sp.add_parser('pagerduty', help='Pagerduty integration') - subparser_integrations_pagerduty_opts(ssp) - - ssp = sp.add_parser('prometheus', help='Prometheus integration') - 
subparser_integrations_prometheus_opts(ssp) - - ssp = sp.add_parser('sonarqube', help='Sonarqube integration') - subparser_integrations_sonarqube_opts(ssp) -# Integrations end - -# Integrations-AWS start -def subparser_integrations_aws_opts(subparser): - sp = subparser.add_subparsers(help='integrations - aws help') - - subparser_integrations_aws_get(sp) - subparser_integrations_aws_get_all(sp) - subparser_integrations_aws_validate(sp) - subparser_integrations_aws_validate_all(sp) - subparser_integrations_aws_add(sp) - subparser_integrations_aws_update(sp) - subparser_integrations_aws_delete(sp) - subparser_integrations_aws_delete_all(sp) - -def subparser_integrations_aws_get(subparser): - sp = subparser.add_parser('get', help='Retrieve a configuration') - add_argument_accountId(sp) - sp.set_defaults(func=integrations_aws_get) - -def integrations_aws_get(args): - get("/api/v1/aws/configurations/" + str(args.accountId)) - -def subparser_integrations_aws_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_aws_get_all) - -def integrations_aws_get_all(args): - get("/api/v1/aws/configurations") - -def subparser_integrations_aws_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a configuration') - add_argument_accountId(sp) - sp.set_defaults(func=integrations_aws_validate) - -def integrations_aws_validate(args): - post("/api/v1/aws/configurations/validate/" + str(args.accountId)) - -def subparser_integrations_aws_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_aws_validate_all) - -def integrations_aws_validate_all(args): - post("/api/v1/aws/configurations/all/validate") - -def subparser_integrations_aws_add(subparser): - sp = subparser.add_parser('add', help='Add a single configuration') - add_argument_accountId(sp) - add_argument_role(sp) - 
sp.set_defaults(func=integrations_aws_add) - -def integrations_aws_add(args): - headers = default_headers() - payload = "{ \"accountId\": \"" + args.accountId + "\", \"role\": \"" + args.role + "\"}" - post("/api/v1/aws/configurations", headers, payload=payload) - -def subparser_integrations_aws_update(subparser): - sp = subparser.add_parser('update', help='Update configurations') - add_argument_file(sp, 'File containing JSON-formatted configuration; all configurations will be replaced') - sp.set_defaults(func=integrations_aws_update) - -def integrations_aws_update(args): - headers = default_headers() - put("/api/v1/aws/configurations", headers, payload=read_file(args)) - -def subparser_integrations_aws_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a configuration') - add_argument_accountId(sp) - sp.set_defaults(func=integrations_aws_delete) - -def integrations_aws_delete(args): - delete("/api/v1/aws/configurations/" + args.accountId) - -def subparser_integrations_aws_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_aws_delete_all) - -def integrations_aws_delete_all(args): - delete("/api/v1/aws/configurations") -# Integrations-AWS end - -# Integrations-azure-resources start -def subparser_integrations_azure_resources_opts(subparser): - sp = subparser.add_subparsers(help='integrations - azure-resources help') - - subparser_integrations_azure_resources_add(sp) - subparser_integrations_azure_resources_add_multiple(sp) - subparser_integrations_azure_resources_delete(sp) - subparser_integrations_azure_resources_delete_all(sp) - subparser_integrations_azure_resources_get(sp) - subparser_integrations_azure_resources_get_all(sp) - subparser_integrations_azure_resources_get_default(sp) - subparser_integrations_azure_resources_update(sp) - subparser_integrations_azure_resources_validate(sp) - subparser_integrations_azure_resources_validate_all(sp) - -def 
subparser_integrations_azure_resources_add(subparser): - sp = subparser.add_parser('add', - help='Add a single configuration', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "alias": "string", - "azureTenantId": "string", - "clientId": "string", - "clientSecret": "string", - "isDefault": true, - "subscriptionId": "string" - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted azure-resources configuration') - sp.set_defaults(func=integrations_azure_resources_add) - -def integrations_azure_resources_add(args): - headers = default_headers() - post("/api/v1/azure-resources/configuration", headers, payload=read_file(args)) - -def subparser_integrations_azure_resources_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "configurations": [ - { - "alias": "string", - "azureTenantId": "string", - "clientId": "string", - "clientSecret": "string", - "isDefault": true, - "subscriptionId": "string" - } - ] - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted azure-resources configurations') - sp.set_defaults(func=integrations_azure_resources_add_multiple) - -def integrations_azure_resources_add_multiple(args): - headers = default_headers() - post("/api/v1/azure-resources/configurations", headers, payload=read_file(args)) - -def subparser_integrations_azure_resources_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_azure_resources_delete) - -def integrations_azure_resources_delete(args): - delete("/api/v1/azure-resources/configuration/" + args.alias) - -def 
subparser_integrations_azure_resources_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_azure_resources_delete_all) - -def integrations_azure_resources_delete_all(args): - delete("/api/v1/azure-resources/configurations") - -def subparser_integrations_azure_resources_get(subparser): - sp = subparser.add_parser('get', help='Get a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_azure_resources_get) - -def integrations_azure_resources_get(args): - get("/api/v1/azure-resources/configuration/" + args.alias) - -def subparser_integrations_azure_resources_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_azure_resources_get_all) - -def integrations_azure_resources_get_all(args): - get("/api/v1/azure-resources/configurations") - -def subparser_integrations_azure_resources_get_default(subparser): - sp = subparser.add_parser('get-default', help='Get default configuration') - sp.set_defaults(func=integrations_azure_resources_get_default) - -def integrations_azure_resources_get_default(args): - get("/api/v1/azure-resources/default-configuration") - -def subparser_integrations_azure_resources_update(subparser): - sp = subparser.add_parser('update', help='WARNING: Updating aliases for configurations or changing the default configuration could cause entity YAMLs that use this integration to break.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted azure-resources configuration') - sp.set_defaults(func=integrations_azure_resources_update) - -def integrations_azure_resources_update(args): - headers = default_headers() - put("/api/v1/azure-resources/configuration/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_azure_resources_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configurations') - 
add_argument_alias(sp) - sp.set_defaults(func=integrations_azure_resources_validate) - -def integrations_azure_resources_validate(args): - post("/api/v1/azure-resources/configuration/validate/" + args.alias) - -def subparser_integrations_azure_resources_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_azure_resources_validate_all) - -def integrations_azure_resources_validate_all(args): - post("/api/v1/azure-resources/configuration/validate") -# Integrations-azure-resources end - -# Integrations-coralogix start -def subparser_integrations_coralogix_opts(subparser): - sp = subparser.add_subparsers(help='integrations - coralogix help') - - subparser_integrations_coralogix_add(sp) - subparser_integrations_coralogix_add_multiple(sp) - subparser_integrations_coralogix_delete(sp) - subparser_integrations_coralogix_delete_all(sp) - subparser_integrations_coralogix_get(sp) - subparser_integrations_coralogix_get_all(sp) - subparser_integrations_coralogix_get_default(sp) - subparser_integrations_coralogix_update(sp) - subparser_integrations_coralogix_validate(sp) - subparser_integrations_coralogix_validate_all(sp) - -def subparser_integrations_coralogix_add(subparser): - sp = subparser.add_parser('add', help='Add a single configuration') - add_argument_file(sp, 'File containing JSON-formatted coralogix configuration') - sp.set_defaults(func=integrations_coralogix_add) - -def integrations_coralogix_add(args): - headers = default_headers() - post("/api/v1/coralogix/configuration/", headers, payload=read_file(args)) - -def subparser_integrations_coralogix_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "accountId": 0, - "alias": "string", - "isDefault": 
true, - "personalKey": "string", - "region": "US" - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted coralogix configurations') - sp.set_defaults(func=integrations_coralogix_add_multiple) - -def integrations_coralogix_add_multiple(args): - headers = default_headers() - post("/api/v1/coralogix/configurations", headers, payload=read_file(args)) - -def subparser_integrations_coralogix_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_coralogix_delete) - -def integrations_coralogix_delete(args): - delete("/api/v1/coralogix/configuration/" + args.alias) - -def subparser_integrations_coralogix_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_coralogix_delete_all) - -def integrations_coralogix_delete_all(args): - delete("/api/v1/coralogix/configurations") - -def subparser_integrations_coralogix_get(subparser): - sp = subparser.add_parser('get', help='Get a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_coralogix_get) - -def integrations_coralogix_get(args): - get("/api/v1/coralogix/configuration/" + args.alias) - -def subparser_integrations_coralogix_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_coralogix_get_all) - -def integrations_coralogix_get_all(args): - get("/api/v1/coralogix/configurations") - -def subparser_integrations_coralogix_get_default(subparser): - sp = subparser.add_parser('get-default', help='Get default configuration') - sp.set_defaults(func=integrations_coralogix_get_default) - -def integrations_coralogix_get_default(args): - get("/api/v1/coralogix/default-configuration") - -def subparser_integrations_coralogix_update(subparser): - sp = subparser.add_parser('update', help='WARNING: Updating aliases for configurations or changing the 
default configuration could cause entity YAMLs that use this integration to break.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted coralogix configuration') - sp.set_defaults(func=integrations_coralogix_update) - -def integrations_coralogix_update(args): - headers = default_headers() - put("/api/v1/coralogix/configuration/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_coralogix_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_coralogix_validate) - -def integrations_coralogix_validate(args): - post("/api/v1/coralogix/configuration/validate/" + args.alias) - -def subparser_integrations_coralogix_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_coralogix_validate_all) - -def integrations_coralogix_validate_all(args): - post("/api/v1/coralogix/configuration/validate") -# Integrations-coralogix end - -# Integrations-github start -def subparser_integrations_github_opts(subparser): - sp = subparser.add_subparsers(help='integrations - github help') - - subparser_integrations_github_add(sp) - subparser_integrations_github_add_personal(sp) - subparser_integrations_github_delete(sp) - subparser_integrations_github_delete_all(sp) - subparser_integrations_github_delete_personal(sp) - subparser_integrations_github_get(sp) - subparser_integrations_github_get_all(sp) - subparser_integrations_github_get_default(sp) - subparser_integrations_github_get_personal(sp) - subparser_integrations_github_update(sp) - subparser_integrations_github_update_personal(sp) - subparser_integrations_github_validate(sp) - subparser_integrations_github_validate_all(sp) - -def subparser_integrations_github_add(subparser): - sp = subparser.add_parser('add', - help='Add a single configuration', - 
formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "alias": "string", - "apiHost": "string", - "appUrl": "string", - "applicationId": "string", - "clientId": "string", - "clientSecret": "string", - "isDefault": true, - "privateKey": "string" - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted github configuration') - sp.set_defaults(func=integrations_github_add) - -def integrations_github_add(args): - headers = default_headers() - post("/api/v1/github/configurations/app", headers, payload=read_file(args)) - -def subparser_integrations_github_add_personal(subparser): - sp = subparser.add_parser('add-personal', - help='Add a single personal configuration', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "accessToken": "string", - "alias": "string", - "apiHost": "string", - "isDefault": true - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted github configuration') - sp.set_defaults(func=integrations_github_add_personal) - -def integrations_github_add_personal(args): - headers = default_headers() - post("/api/v1/github/configurations/personal", headers, payload=read_file(args)) - -def subparser_integrations_github_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_github_delete) - -def integrations_github_delete(args): - delete("/api/v1/github/configurations/app/" + args.alias) - -def subparser_integrations_github_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_github_delete_all) - -def integrations_github_delete_all(args): - delete("/api/v1/github/configurations") - -def 
subparser_integrations_github_delete_personal(subparser): - sp = subparser.add_parser('delete-personal', help='Delete a personal configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_github_delete_personal) - -def integrations_github_delete_personal(args): - delete("/api/v1/github/configurations/personal/" + args.alias) - -def subparser_integrations_github_get(subparser): - sp = subparser.add_parser('get', help='Get a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_github_get) - -def integrations_github_get(args): - get("/api/v1/github/configurations/app/" + args.alias) - -def subparser_integrations_github_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_github_get_all) - -def integrations_github_get_all(args): - get("/api/v1/github/configurations") - -def subparser_integrations_github_get_default(subparser): - sp = subparser.add_parser('get-default', help='Get default configuration') - sp.set_defaults(func=integrations_github_get_default) - -def integrations_github_get_default(args): - get("/api/v1/github/default-configuration") - -def subparser_integrations_github_get_personal(subparser): - sp = subparser.add_parser('get-personal', help='Get a single personal configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_github_get_personal) - -def integrations_github_get_personal(args): - get("/api/v1/github/configurations/personal/" + args.alias) - -def subparser_integrations_github_get_personal(subparser): - sp = subparser.add_parser('get-personal', help='Get a single personal configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_github_get_personal) - -def integrations_github_get_personal(args): - get("/api/v1/github/configurations/personal/" + args.alias) - -def subparser_integrations_github_update(subparser): - sp = subparser.add_parser('update', help='Update a single app 
configuration.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted github configuration') - sp.set_defaults(func=integrations_github_update) - -def integrations_github_update(args): - headers = default_headers() - put("/api/v1/github/configurations/app/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_github_update_personal(subparser): - sp = subparser.add_parser('update-personal', help='Update a single personal configuration.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted github configuration') - sp.set_defaults(func=integrations_github_update_personal) - -def integrations_github_update_personal(args): - headers = default_headers() - put("/api/v1/github/configurations/personal/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_github_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_github_validate) - -def integrations_github_validate(args): - post("/api/v1/github/configurations/validate/" + args.alias) - -def subparser_integrations_github_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_github_validate_all) - -def integrations_github_validate_all(args): - post("/api/v1/github/configurations/validate") -# Integrations-github end - -# Integrations-gitlab start -def subparser_integrations_gitlab_opts(subparser): - sp = subparser.add_subparsers(help='integrations - gitlab help') - - subparser_integrations_gitlab_add(sp) - subparser_integrations_gitlab_add_multiple(sp) - subparser_integrations_gitlab_delete(sp) - subparser_integrations_gitlab_delete_all(sp) - subparser_integrations_gitlab_get(sp) - subparser_integrations_gitlab_get_all(sp) - subparser_integrations_gitlab_get_default(sp) - subparser_integrations_gitlab_update(sp) - 
subparser_integrations_gitlab_validate(sp) - subparser_integrations_gitlab_validate_all(sp) - -def subparser_integrations_gitlab_add(subparser): - sp = subparser.add_parser('add', help='Add a single configuration') - add_argument_file(sp, 'File containing JSON-formatted gitlab configuration') - sp.set_defaults(func=integrations_gitlab_add) - -def integrations_gitlab_add(args): - headers = default_headers() - post("/api/v1/gitlab/configuration/", headers, payload=read_file(args)) - -def subparser_integrations_gitlab_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "configurations": [ - { - "alias": "string", - "groupNames": [ - "string" - ], - "hidePersonalProjects": true, - "host": "string", - "isDefault": true, - "personalAccessToken": "string" - } - ] - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted gitlab configurations') - sp.set_defaults(func=integrations_gitlab_add_multiple) - -def integrations_gitlab_add_multiple(args): - headers = default_headers() - post("/api/v1/gitlab/configurations", headers, payload=read_file(args)) - -def subparser_integrations_gitlab_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_gitlab_delete) - -def integrations_gitlab_delete(args): - delete("/api/v1/gitlab/configuration/" + args.alias) - -def subparser_integrations_gitlab_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_gitlab_delete_all) - -def integrations_gitlab_delete_all(args): - delete("/api/v1/gitlab/configurations") - -def subparser_integrations_gitlab_get(subparser): - sp = subparser.add_parser('get', help='Get a single 
configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_gitlab_get) - -def integrations_gitlab_get(args): - get("/api/v1/gitlab/configuration/" + args.alias) - -def subparser_integrations_gitlab_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_gitlab_get_all) - -def integrations_gitlab_get_all(args): - get("/api/v1/gitlab/configurations") - -def subparser_integrations_gitlab_get_default(subparser): - sp = subparser.add_parser('get-default', help='Get default configuration') - sp.set_defaults(func=integrations_gitlab_get_default) - -def integrations_gitlab_get_default(args): - get("/api/v1/gitlab/default-configuration") - -def subparser_integrations_gitlab_update(subparser): - sp = subparser.add_parser('update', help='WARNING: Updating aliases for configurations or changing the default configuration could cause entity YAMLs that use this integration to break.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted gitlab configuration') - sp.set_defaults(func=integrations_gitlab_update) - -def integrations_gitlab_update(args): - headers = default_headers() - put("/api/v1/gitlab/configuration/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_gitlab_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_gitlab_validate) - -def integrations_gitlab_validate(args): - post("/api/v1/gitlab/configuration/validate/" + args.alias) - -def subparser_integrations_gitlab_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_gitlab_validate_all) - -def integrations_gitlab_validate_all(args): - post("/api/v1/gitlab/configuration/validate") -# Integrations-gitlab end - -# Integrations-datadog start -def 
subparser_integrations_datadog_opts(subparser): - sp = subparser.add_subparsers(help='integrations - datadog help') - - subparser_integrations_datadog_add(sp) - subparser_integrations_datadog_add_multiple(sp) - subparser_integrations_datadog_delete(sp) - subparser_integrations_datadog_delete_all(sp) - subparser_integrations_datadog_get(sp) - subparser_integrations_datadog_get_all(sp) - subparser_integrations_datadog_get_default(sp) - subparser_integrations_datadog_update(sp) - -def subparser_integrations_datadog_add(subparser): - sp = subparser.add_parser('add', help='Add a single configuration') - add_argument_file(sp, 'File containing JSON-formatted datadog configuration') - sp.set_defaults(func=integrations_datadog_add) - -def integrations_datadog_add(args): - headers = default_headers() - post("/api/v1/datadog/configuration/", headers, payload=read_file(args)) - -def subparser_integrations_datadog_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "alias": "string", - "apiKey": "string", - "appKey": "string", - "customSubdomain": "string", - "environments": [ - "string" - ], - "isDefault": true, - "region": "EU1" - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted datadog configurations') - sp.set_defaults(func=integrations_datadog_add_multiple) - -def integrations_datadog_add_multiple(args): - headers = default_headers() - post("/api/v1/datadog/configurations", headers, payload=read_file(args)) - -def subparser_integrations_datadog_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_datadog_delete) - -def integrations_datadog_delete(args): - delete("/api/v1/datadog/configuration/" + args.alias) - -def 
subparser_integrations_datadog_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_datadog_delete_all) - -def integrations_datadog_delete_all(args): - delete("/api/v1/datadog/configurations") - -def subparser_integrations_datadog_get(subparser): - sp = subparser.add_parser('get', help='Get a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_datadog_get) - -def integrations_datadog_get(args): - get("/api/v1/datadog/configuration/" + args.alias) - -def subparser_integrations_datadog_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_datadog_get_all) - -def integrations_datadog_get_all(args): - get("/api/v1/datadog/configurations") - -def subparser_integrations_datadog_get_default(subparser): - sp = subparser.add_parser('get-default', help='Get default configuration') - sp.set_defaults(func=integrations_datadog_get_default) - -def integrations_datadog_get_default(args): - get("/api/v1/datadog/default-configuration") - -def subparser_integrations_datadog_update(subparser): - sp = subparser.add_parser('update', help='WARNING: Updating aliases for configurations or changing the default configuration could cause entity YAMLs that use this integration to break.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted datadog configuration') - sp.set_defaults(func=integrations_datadog_update) - -def integrations_datadog_update(args): - headers = default_headers() - put("/api/v1/datadog/configuration/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_datadog_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_datadog_validate) - -def integrations_datadog_validate(args): - post("/api/v1/datadog/configuration/validate/" + 
args.alias) - -def subparser_integrations_datadog_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_datadog_validate_all) - -def integrations_datadog_validate_all(args): - post("/api/v1/datadog/configuration/validate") -# Integrations-datadog end - -# Integrations-incidentio start -def subparser_integrations_incidentio_opts(subparser): - sp = subparser.add_subparsers(help='integrations - incident.io help') - - subparser_integrations_incidentio_add(sp) - subparser_integrations_incidentio_add_multiple(sp) - subparser_integrations_incidentio_delete(sp) - subparser_integrations_incidentio_delete_all(sp) - subparser_integrations_incidentio_get(sp) - subparser_integrations_incidentio_get_all(sp) - subparser_integrations_incidentio_get_default(sp) - subparser_integrations_incidentio_update(sp) - subparser_integrations_incidentio_validate(sp) - subparser_integrations_incidentio_validate_all(sp) - -def subparser_integrations_incidentio_add(subparser): - sp = subparser.add_parser('add', - help='Add a single configuration', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "alias": "string", - "apiKey": "string", - "isDefault": true - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted incidentio configuration') - sp.set_defaults(func=integrations_incidentio_add) - -def integrations_incidentio_add(args): - headers = default_headers() - post("/api/v1/incidentio/configuration", headers, payload=read_file(args)) - -def subparser_integrations_incidentio_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - 
"configurations": [ - { - "alias": "string", - "apiKey": "string", - "isDefault": true - }, - { - "alias": "string", - "apiKey": "string", - "isDefault": true - } - ] - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted incidentio configurations') - sp.set_defaults(func=integrations_incidentio_add_multiple) - -def integrations_incidentio_add_multiple(args): - headers = default_headers() - post("/api/v1/incidentio/configurations", headers, payload=read_file(args)) - -def subparser_integrations_incidentio_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_incidentio_delete) - -def integrations_incidentio_delete(args): - delete("/api/v1/incidentio/configuration/" + args.alias) - -def subparser_integrations_incidentio_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_incidentio_delete_all) - -def integrations_incidentio_delete_all(args): - delete("/api/v1/incidentio/configurations") - -def subparser_integrations_incidentio_get(subparser): - sp = subparser.add_parser('get', help='Get a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_incidentio_get) - -def integrations_incidentio_get(args): - get("/api/v1/incidentio/configuration/" + args.alias) - -def subparser_integrations_incidentio_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_incidentio_get_all) - -def integrations_incidentio_get_all(args): - get("/api/v1/incidentio/configurations") - -def subparser_integrations_incidentio_get_default(subparser): - sp = subparser.add_parser('get-default', help='Get default configuration') - sp.set_defaults(func=integrations_incidentio_get_default) - -def integrations_incidentio_get_default(args): - get("/api/v1/incidentio/default-configuration") - -def 
subparser_integrations_incidentio_update(subparser): - sp = subparser.add_parser('update', help='WARNING: Updating aliases for configurations or changing the default configuration could cause entity YAMLs that use this integration to break.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted incidentio configuration') - sp.set_defaults(func=integrations_incidentio_update) - -def integrations_incidentio_update(args): - headers = default_headers() - put("/api/v1/incidentio/configuration/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_incidentio_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_incidentio_validate) - -def integrations_incidentio_validate(args): - post("/api/v1/incidentio/configuration/validate/" + args.alias) - -def subparser_integrations_incidentio_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_incidentio_validate_all) - -def integrations_incidentio_validate_all(args): - post("/api/v1/incidentio/configuration/validate") -# Integrations-incidentio end - -# Integrations-launchdarkly start -def subparser_integrations_launchdarkly_opts(subparser): - sp = subparser.add_subparsers(help='integrations - launchdarkly help') - - subparser_integrations_launchdarkly_add(sp) - subparser_integrations_launchdarkly_add_multiple(sp) - subparser_integrations_launchdarkly_delete(sp) - subparser_integrations_launchdarkly_delete_all(sp) - subparser_integrations_launchdarkly_get(sp) - subparser_integrations_launchdarkly_get_all(sp) - subparser_integrations_launchdarkly_get_default(sp) - subparser_integrations_launchdarkly_update(sp) - subparser_integrations_launchdarkly_validate(sp) - subparser_integrations_launchdarkly_validate_all(sp) - -def subparser_integrations_launchdarkly_add(subparser): - sp = 
subparser.add_parser('add', help='Add a single configuration') - add_argument_file(sp, 'File containing JSON-formatted launchdarkly configuration') - sp.set_defaults(func=integrations_launchdarkly_add) - -def integrations_launchdarkly_add(args): - headers = default_headers() - post("/api/v1/launchdarkly/configuration/", headers, payload=read_file(args)) - -def subparser_integrations_launchdarkly_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "accountId": 0, - "alias": "string", - "isDefault": true, - "personalKey": "string", - "region": "US" - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted launchdarkly configurations') - sp.set_defaults(func=integrations_launchdarkly_add_multiple) - -def integrations_launchdarkly_add_multiple(args): - headers = default_headers() - post("/api/v1/launchdarkly/configurations", headers, payload=read_file(args)) - -def subparser_integrations_launchdarkly_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_launchdarkly_delete) - -def integrations_launchdarkly_delete(args): - delete("/api/v1/launchdarkly/configuration/" + args.alias) - -def subparser_integrations_launchdarkly_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_launchdarkly_delete_all) - -def integrations_launchdarkly_delete_all(args): - delete("/api/v1/launchdarkly/configurations") - -def subparser_integrations_launchdarkly_get(subparser): - sp = subparser.add_parser('get', help='Get a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_launchdarkly_get) - -def integrations_launchdarkly_get(args): - 
get("/api/v1/launchdarkly/configuration/" + args.alias) - -def subparser_integrations_launchdarkly_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_launchdarkly_get_all) - -def integrations_launchdarkly_get_all(args): - get("/api/v1/launchdarkly/configurations") - -def subparser_integrations_launchdarkly_get_default(subparser): - sp = subparser.add_parser('get-default', help='Get default configuration') - sp.set_defaults(func=integrations_launchdarkly_get_default) - -def integrations_launchdarkly_get_default(args): - get("/api/v1/launchdarkly/default-configuration") - -def subparser_integrations_launchdarkly_update(subparser): - sp = subparser.add_parser('update', help='WARNING: Updating aliases for configurations or changing the default configuration could cause entity YAMLs that use this integration to break.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted launchdarkly configuration') - sp.set_defaults(func=integrations_launchdarkly_update) - -def integrations_launchdarkly_update(args): - headers = default_headers() - put("/api/v1/launchdarkly/configuration/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_launchdarkly_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_launchdarkly_validate) - -def integrations_launchdarkly_validate(args): - post("/api/v1/launchdarkly/configuration/validate/" + args.alias) - -def subparser_integrations_launchdarkly_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_launchdarkly_validate_all) - -def integrations_launchdarkly_validate_all(args): - post("/api/v1/launchdarkly/configuration/validate") -# Integrations-launchdarkly end - -# Integrations-newrelic start -def 
subparser_integrations_newrelic_opts(subparser): - sp = subparser.add_subparsers(help='integrations - newrelic help') - - subparser_integrations_newrelic_add(sp) - subparser_integrations_newrelic_add_multiple(sp) - subparser_integrations_newrelic_delete(sp) - subparser_integrations_newrelic_delete_all(sp) - subparser_integrations_newrelic_get(sp) - subparser_integrations_newrelic_get_all(sp) - subparser_integrations_newrelic_get_default(sp) - subparser_integrations_newrelic_update(sp) - subparser_integrations_newrelic_validate(sp) - subparser_integrations_newrelic_validate_all(sp) - -def subparser_integrations_newrelic_add(subparser): - sp = subparser.add_parser('add', - help='Add a single configuration', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "accountId": 0, - "alias": "string", - "isDefault": true, - "personalKey": "string", - "region": "US" - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted newrelic configuration') - sp.set_defaults(func=integrations_newrelic_add) - -def integrations_newrelic_add(args): - headers = default_headers() - post("/api/v1/newrelic/configuration", headers, payload=read_file(args)) - -def subparser_integrations_newrelic_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "configurations": [ - { - "accountId": 0, - "alias": "string", - "isDefault": true, - "personalKey": "string", - "region": "US" - } - ] - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted newrelic configurations') - sp.set_defaults(func=integrations_newrelic_add_multiple) - -def integrations_newrelic_add_multiple(args): - headers = default_headers() - 
post("/api/v1/newrelic/configurations", headers, payload=read_file(args)) - -def subparser_integrations_newrelic_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_newrelic_delete) - -def integrations_newrelic_delete(args): - delete("/api/v1/newrelic/configuration/" + args.alias) - -def subparser_integrations_newrelic_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_newrelic_delete_all) - -def integrations_newrelic_delete_all(args): - delete("/api/v1/newrelic/configurations") - -def subparser_integrations_newrelic_get(subparser): - sp = subparser.add_parser('get', help='Get a single configuration') - add_argument_alias(sp) - sp.set_defaults(func=integrations_newrelic_get) - -def integrations_newrelic_get(args): - get("/api/v1/newrelic/configuration/" + args.alias) - -def subparser_integrations_newrelic_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_newrelic_get_all) - -def integrations_newrelic_get_all(args): - get("/api/v1/newrelic/configurations") - -def subparser_integrations_newrelic_get_default(subparser): - sp = subparser.add_parser('get-default', help='Get default configuration') - sp.set_defaults(func=integrations_newrelic_get_default) - -def integrations_newrelic_get_default(args): - get("/api/v1/newrelic/default-configuration") - -def subparser_integrations_newrelic_update(subparser): - sp = subparser.add_parser('update', help='WARNING: Updating aliases for configurations or changing the default configuration could cause entity YAMLs that use this integration to break.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted newrelic configuration') - sp.set_defaults(func=integrations_newrelic_update) - -def integrations_newrelic_update(args): - headers = 
default_headers() - put("/api/v1/newrelic/configuration/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_newrelic_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_newrelic_validate) - -def integrations_newrelic_validate(args): - post("/api/v1/newrelic/configuration/validate/" + args.alias) - -def subparser_integrations_newrelic_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_newrelic_validate_all) - -def integrations_newrelic_validate_all(args): - post("/api/v1/newrelic/configuration/validate") -# Integrations-newrelic end - -# Integrations-prometheus start -def subparser_integrations_prometheus_opts(subparser): - sp = subparser.add_subparsers(help='integrations - prometheus help') - - subparser_integrations_prometheus_add(sp) - subparser_integrations_prometheus_add_multiple(sp) - subparser_integrations_prometheus_delete(sp) - subparser_integrations_prometheus_delete_all(sp) - subparser_integrations_prometheus_get(sp) - subparser_integrations_prometheus_get_all(sp) - subparser_integrations_prometheus_get_default(sp) - subparser_integrations_prometheus_update(sp) - -def subparser_integrations_prometheus_add(subparser): - sp = subparser.add_parser('add', help='Add a single configuration') - add_argument_file(sp, 'File containing JSON-formatted prometheus configuration') - sp.set_defaults(func=integrations_prometheus_add) - -def integrations_prometheus_add(args): - headers = default_headers() - post("/api/v1/prometheus/configuration/", headers, payload=read_file(args)) - -def subparser_integrations_prometheus_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration 
file: - -------------------------------------------- - { - "alias": "string", - "host": "string", - "isDefault": true, - "password": "string", - "prometheusTenantId": "string", - "username": "string" - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted prometheus configurations') - sp.set_defaults(func=integrations_prometheus_add_multiple) - -def integrations_prometheus_add_multiple(args): - headers = default_headers() - post("/api/v1/prometheus/configurations", headers, payload=read_file(args)) - -def subparser_integrations_prometheus_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_prometheus_delete) - -def integrations_prometheus_delete(args): - delete("/api/v1/prometheus/configuration/" + args.alias) - -def subparser_integrations_prometheus_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_prometheus_delete_all) - -def integrations_prometheus_delete_all(args): - delete("/api/v1/prometheus/configurations") - -def subparser_integrations_prometheus_get(subparser): - sp = subparser.add_parser('get', help='Get a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_prometheus_get) - -def integrations_prometheus_get(args): - get("/api/v1/prometheus/configuration/" + args.alias) - -def subparser_integrations_prometheus_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_prometheus_get_all) - -def integrations_prometheus_get_all(args): - get("/api/v1/prometheus/configurations") - -def subparser_integrations_prometheus_get_default(subparser): - sp = subparser.add_parser('get-default', help='Get default configuration') - sp.set_defaults(func=integrations_prometheus_get_default) - -def integrations_prometheus_get_default(args): - 
get("/api/v1/prometheus/default-configuration") - -def subparser_integrations_prometheus_update(subparser): - sp = subparser.add_parser('update', help='WARNING: Updating aliases for configurations or changing the default configuration could cause entity YAMLs that use this integration to break.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted prometheus configuration') - sp.set_defaults(func=integrations_prometheus_update) - -def integrations_prometheus_update(args): - headers = default_headers() - put("/api/v1/prometheus/configuration/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_prometheus_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_prometheus_validate) - -def integrations_prometheus_validate(args): - post("/api/v1/prometheus/configuration/validate/" + args.alias) - -def subparser_integrations_prometheus_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_prometheus_validate_all) - -def integrations_prometheus_validate_all(args): - post("/api/v1/prometheus/configuration/validate") -# Integrations-prometheus end - -# Integrations-pagerduty start -def subparser_integrations_pagerduty_opts(subparser): - sp = subparser.add_subparsers(help='integrations - pagerduty help') - - subparser_integrations_pagerduty_add(sp) - subparser_integrations_pagerduty_delete(sp) - subparser_integrations_pagerduty_get(sp) - subparser_integrations_pagerduty_validate(sp) - -def subparser_integrations_pagerduty_add(subparser): - sp = subparser.add_parser('add', help='Add a single configuration') - add_argument_file(sp, 'File containing JSON-formatted pagerduty configuration') - sp.set_defaults(func=integrations_pagerduty_add) - -def integrations_pagerduty_add(args): - headers = default_headers() - 
post("/api/v1/pagerduty/configuration/", headers, payload=read_file(args)) - -def subparser_integrations_pagerduty_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configurations') - sp.set_defaults(func=integrations_pagerduty_delete) - -def integrations_pagerduty_delete(args): - delete("/api/v1/pagerduty/configurations") - -def subparser_integrations_pagerduty_get(subparser): - sp = subparser.add_parser('get', help='Get a single configurations') - sp.set_defaults(func=integrations_pagerduty_get) - -def integrations_pagerduty_get(args): - get("/api/v1/pagerduty/default-configuration") - -def subparser_integrations_pagerduty_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a configuration') - sp.set_defaults(func=integrations_pagerduty_validate) - -def integrations_pagerduty_validate(args): - post("/api/v1/pagerduty/configuration/validate") -# Integrations-pagerduty end - -# Integrations-sonarqube start -def subparser_integrations_sonarqube_opts(subparser): - sp = subparser.add_subparsers(help='integrations - sonarqube help') - - subparser_integrations_sonarqube_add(sp) - subparser_integrations_sonarqube_add_multiple(sp) - subparser_integrations_sonarqube_delete(sp) - subparser_integrations_sonarqube_delete_all(sp) - subparser_integrations_sonarqube_get(sp) - subparser_integrations_sonarqube_get_all(sp) - subparser_integrations_sonarqube_get_default(sp) - subparser_integrations_sonarqube_update(sp) - subparser_integrations_sonarqube_validate(sp) - subparser_integrations_sonarqube_validate_all(sp) - -def subparser_integrations_sonarqube_add(subparser): - sp = subparser.add_parser('add', help='Add a single configuration') - add_argument_file(sp, 'File containing JSON-formatted sonarqube configuration') - sp.set_defaults(func=integrations_sonarqube_add) - -def integrations_sonarqube_add(args): - headers = default_headers() - post("/api/v1/sonarqube/configuration/", headers, payload=read_file(args)) - -def 
subparser_integrations_sonarqube_add_multiple(subparser): - sp = subparser.add_parser('add-multiple', - help='Add multiple configurations', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "alias": "string", - "host": "string", - "isDefault": true, - "token": "string" - } - ''')) - add_argument_file(sp, 'File containing JSON-formatted sonarqube configurations') - sp.set_defaults(func=integrations_sonarqube_add_multiple) - -def integrations_sonarqube_add_multiple(args): - headers = default_headers() - post("/api/v1/sonarqube/configurations", headers, payload=read_file(args)) - -def subparser_integrations_sonarqube_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_sonarqube_delete) - -def integrations_sonarqube_delete(args): - delete("/api/v1/sonarqube/configuration/" + args.alias) - -def subparser_integrations_sonarqube_delete_all(subparser): - sp = subparser.add_parser('delete-all', help='Delete all configurations') - sp.set_defaults(func=integrations_sonarqube_delete_all) - -def integrations_sonarqube_delete_all(args): - delete("/api/v1/sonarqube/configurations") - -def subparser_integrations_sonarqube_get(subparser): - sp = subparser.add_parser('get', help='Get a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_sonarqube_get) - -def integrations_sonarqube_get(args): - get("/api/v1/sonarqube/configuration/" + args.alias) - -def subparser_integrations_sonarqube_get_all(subparser): - sp = subparser.add_parser('get-all', help='Get all configurations') - sp.set_defaults(func=integrations_sonarqube_get_all) - -def integrations_sonarqube_get_all(args): - get("/api/v1/sonarqube/configurations") - -def subparser_integrations_sonarqube_get_default(subparser): - sp = subparser.add_parser('get-default', 
help='Get default configuration') - sp.set_defaults(func=integrations_sonarqube_get_default) - -def integrations_sonarqube_get_default(args): - get("/api/v1/sonarqube/default-configuration") - -def subparser_integrations_sonarqube_update(subparser): - sp = subparser.add_parser('update', help='WARNING: Updating aliases for configurations or changing the default configuration could cause entity YAMLs that use this integration to break.') - add_argument_alias(sp) - add_argument_file(sp, 'File containing JSON-formatted sonarqube configuration') - sp.set_defaults(func=integrations_sonarqube_update) - -def integrations_sonarqube_update(args): - headers = default_headers() - put("/api/v1/sonarqube/configuration/" + args.alias, headers, payload=read_file(args)) - -def subparser_integrations_sonarqube_validate(subparser): - sp = subparser.add_parser('validate', help='Validate a single configurations') - add_argument_alias(sp) - sp.set_defaults(func=integrations_sonarqube_validate) - -def integrations_sonarqube_validate(args): - post("/api/v1/sonarqube/configuration/validate/" + args.alias) - -def subparser_integrations_sonarqube_validate_all(subparser): - sp = subparser.add_parser('validate-all', help='Validate all configurations') - sp.set_defaults(func=integrations_sonarqube_validate_all) - -def integrations_sonarqube_validate_all(args): - post("/api/v1/sonarqube/configuration/validate") -# Integrations-sonarqube end - -# IP Allowlist start -def subparser_ip_allowlist_opts(subparsers): - p = subparsers.add_parser('ip-allowlist', help='IP Allowlist information') - sp = p.add_subparsers(help='IP Allowlist help') - - subparser_ip_allowlist_get(sp) - subparser_ip_allowlist_replace(sp) - subparser_ip_allowlist_validate(sp) - -def subparser_ip_allowlist_get(subparser): - sp = subparser.add_parser('get', help='Get allowlist of IP addresses and ranges') - sp.set_defaults(func=ip_allowlist_get) - -def ip_allowlist_get(args): - get("/api/v1/ip-allowlist") - -def 
subparser_ip_allowlist_replace(subparser): - sp = subparser.add_parser('replace', - help='Replace allowlist of IP addresses and ranges', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "entries": [ - { - "address": "10.0.0.1", - "description": "string" - } - ] - } - ''')) - add_argument_file(sp, 'file containing JSON-formatted content of IP allowlist entries') - sp.set_defaults(func=ip_allowlist_replace) - -def ip_allowlist_replace(args): - headers = default_headers() - put("/api/v1/ip-allowlist", headers, read_file(args)) - -def subparser_ip_allowlist_validate(subparser): - sp = subparser.add_parser('validate', - help='Validate allowlist of IP addresses and ranges', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "entries": [ - { - "address": "10.0.0.1", - "description": "string" - } - ] - } - ''')) - add_argument_file(sp, 'file containing JSON-formatted content of IP allowlist entries') - sp.set_defaults(func=ip_allowlist_validate) - -def ip_allowlist_validate(args): - headers = default_headers() - post("/api/v1/ip-allowlist/validate", headers, read_file(args)) -# IP Allowlist end - -# On-Call start -def subparser_on_call_opts(subparsers): - p = subparsers.add_parser('on-call', help='get on-call information') - sp = p.add_subparsers(help='on-call help') - - subparser_on_call_get(sp) - subparser_on_call_get_registration(sp) - -def subparser_on_call_get(subparser): - sp = subparser.add_parser('get', help='Get current on-call for an entity') - sp.set_defaults(func=on_call_get) - -def on_call_get(args): - get("/api/v1/catalog/" + args.tag + "/integrations/oncall/current") - -def subparser_on_call_get_registration(subparser): - sp = subparser.add_parser('get-registration', help='Retrieve on-call registration 
for entity') - add_argument_tag(sp) - sp.set_defaults(func=on_call_get_registration) - -def on_call_get_registration(args): - get("/api/v1/catalog/" + args.tag + "/integrations/oncall/registration") -# On-Call end - -# Packages start -def subparser_packages_opts(subparsers): - p = subparsers.add_parser('packages', help='commands to create and modify packages') - sp = p.add_subparsers(help='packages help') - - subparser_packages_list_packages(sp) - ssp = sp.add_parser('go', help='Go package commands') - subparser_packages_go_opts(ssp) - ssp = sp.add_parser('java', help='Java package commands') - subparser_packages_java_opts(ssp) - ssp = sp.add_parser('python', help='Python package commands') - subparser_packages_python_opts(ssp) - ssp = sp.add_parser('node', help='Node package commands') - subparser_packages_node_opts(ssp) - ssp = sp.add_parser('nuget', help='NuGet package commands') - subparser_packages_nuget_opts(ssp) - -def subparser_packages_list_packages(subparser): - sp = subparser.add_parser('list', help='List packages') - add_argument_tag(sp) - sp.set_defaults(func=packages_list_packages) - -def packages_list_packages(args): - get("/api/v1/catalog/" + args.tag + "/packages") - -def subparser_packages_go_opts(subparser): - sp = subparser.add_subparsers(help='Go package sub-commands.') - - subparser_packages_upload_go(sp) - subparser_packages_delete_go(sp) - -def subparser_packages_upload_go(subparser): - sp = subparser.add_parser('upload', help='Upload go.sum package.') - add_argument_tag(sp) - add_argument_file(sp, 'File containing contents of go.sum') - sp.set_defaults(func=packages_upload_go) - -def packages_upload_go(args): - headers = default_headers('application/text') - post("/api/v1/catalog/" + args.tag + "/packages/go/gosum", headers, read_file(args)) - -def subparser_packages_delete_go(subparser): - sp = subparser.add_parser('delete', help='Delete go package from entity') - add_argument_tag(sp) - add_argument_name(sp, "The name of the package to 
delete") - sp.set_defaults(func=packages_delete_go) - -def packages_delete_go(args): - delete("/api/v1/catalog/" + args.tag + "/packages/go" + parse_opts(args)) - -def subparser_packages_java_opts(subparser): - sp = subparser.add_subparsers(help='Java package sub-commands.') - - subparser_packages_upload_java_single(sp) - subparser_packages_upload_java_multiple(sp) - subparser_packages_delete_java(sp) - -def subparser_packages_upload_java_single(subparser): - sp = subparser.add_parser('upload-single', help='Upload single java package') - add_argument_tag(sp) - add_argument_file(sp, 'JSON file containing contents of single java package') - sp.set_defaults(func=packages_upload_java_single) - -def packages_upload_java_single(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/packages/java", headers, read_file(args)) - -def subparser_packages_upload_java_multiple(subparser): - sp = subparser.add_parser('upload-multiple', help='Upload multiple java packages') - add_argument_tag(sp) - add_argument_file(sp, 'JSON file containing array of java packages') - sp.set_defaults(func=packages_upload_java_multiple) - -def packages_upload_java_multiple(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/packages/java/bulk", headers, read_file(args)) - -def subparser_packages_delete_java(subparser): - sp = subparser.add_parser('delete', help='Delete java package from entity') - add_argument_tag(sp) - add_argument_name(sp, "The name of the package to delete") - sp.set_defaults(func=packages_delete_java) - -def packages_delete_java(args): - delete("/api/v1/catalog/" + args.tag + "/packages/java" + parse_opts(args)) - -def subparser_packages_python_opts(subparser): - sp = subparser.add_subparsers(help='Python package sub-commands.') - - subparser_packages_upload_python_pipfile(sp) - subparser_packages_upload_python_requirements(sp) - subparser_packages_delete_python(sp) - -def subparser_packages_upload_python_pipfile(subparser): - sp 
= subparser.add_parser('upload-pipfile', help='Upload python pipfile.lock file') - add_argument_tag(sp) - add_argument_file(sp, 'pipfile.lock file') - sp.set_defaults(func=packages_upload_python_pipfile) - -def packages_upload_python_pipfile(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/packages/python/pipfile", headers, read_file(args)) - -def subparser_packages_upload_python_requirements(subparser): - sp = subparser.add_parser('upload-requirements', help='Upload python requirements.txt file') - add_argument_tag(sp) - add_argument_file(sp, 'requirements.txt file') - sp.set_defaults(func=packages_upload_python_requirements) - -def packages_upload_python_requirements(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/packages/python/requirements", headers, read_file(args)) - -def subparser_packages_delete_python(subparser): - sp = subparser.add_parser('delete', help='Delete python package from entity') - add_argument_tag(sp) - add_argument_name(sp, "The name of the package to delete") - sp.set_defaults(func=packages_delete_python) - -def packages_delete_python(args): - delete("/api/v1/catalog/" + args.tag + "/packages/python" + parse_opts(args)) - -def subparser_packages_node_opts(subparser): - sp = subparser.add_subparsers(help='Node package sub-commands.') - - subparser_packages_upload_node_package(sp) - subparser_packages_upload_node_package_lock(sp) - subparser_packages_upload_node_yarn_lock(sp) - subparser_packages_delete_node(sp) - -def subparser_packages_upload_node_package(subparser): - sp = subparser.add_parser('upload-package', help='Upload node package.json file') - add_argument_tag(sp) - add_argument_file(sp, 'package.json file') - sp.set_defaults(func=packages_upload_node_package) - -def packages_upload_node_package(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/packages/node/package-json", headers, read_file(args)) - -def 
subparser_packages_upload_node_package_lock(subparser): - sp = subparser.add_parser('upload-package-lock', help='Upload node package-lock.json file') - add_argument_tag(sp) - add_argument_file(sp, 'package-lock.json file') - sp.set_defaults(func=packages_upload_node_package_lock) - -def packages_upload_node_package_lock(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/packages/node/package-lock", headers, read_file(args)) - -def subparser_packages_upload_node_yarn_lock(subparser): - sp = subparser.add_parser('upload-yarn-lock', help='Upload yarn.lock file') - add_argument_tag(sp) - add_argument_file(sp, 'yarn.lock file') - sp.set_defaults(func=packages_upload_node_yarn_lock) - -def packages_upload_node_yarn_lock(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/packages/node/yarn-lock", headers, read_file(args)) - -def subparser_packages_delete_node(subparser): - sp = subparser.add_parser('delete', help='Delete node package from entity') - add_argument_tag(sp) - add_argument_name(sp, "The name of the package to delete") - sp.set_defaults(func=packages_delete_node) - -def packages_delete_node(args): - delete("/api/v1/catalog/" + args.tag + "/packages/node" + parse_opts(args)) - -def subparser_packages_nuget_opts(subparser): - sp = subparser.add_subparsers(help='NuGet package sub-commands.') - - subparser_packages_upload_nuget_csproj(sp) - subparser_packages_upload_nuget_packages_lock(sp) - subparser_packages_delete_nuget(sp) - -def subparser_packages_upload_nuget_csproj(subparser): - sp = subparser.add_parser('upload-csproj', help='Upload Nuget csproj') - add_argument_tag(sp) - add_argument_file(sp, '*.csproj file') - sp.set_defaults(func=packages_upload_nuget_csproj) - -def packages_upload_nuget_csproj(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/packages/dotnet/nuget/csproj", headers, read_file(args)) - -def subparser_packages_upload_nuget_packages_lock(subparser): - sp = 
subparser.add_parser('upload-packages-lock', help='Upload Nuget packages.lock.json') - add_argument_tag(sp) - add_argument_file(sp, 'packages.lock.json file') - sp.set_defaults(func=packages_upload_nuget_packages_lock) - -def packages_upload_nuget_packages_lock(args): - headers = default_headers() - post("/api/v1/catalog/" + args.tag + "/packages/dotnet/nuget/packages-lock", headers, read_file(args)) - -def subparser_packages_delete_nuget(subparser): - sp = subparser.add_parser('delete', help='Delete nuget package from entity') - add_argument_tag(sp) - add_argument_name(sp, "The name of the package to delete") - sp.set_defaults(func=packages_delete_nuget) - -def packages_delete_nuget(args): - delete("/api/v1/catalog/" + args.tag + "/packages/dotnet/nuget" + parse_opts(args)) -# Packages end - -# Plugins start -def subparser_plugins_opts(subparsers): - p = subparsers.add_parser('plugins', help='commands to create and access plugins') - sp = p.add_subparsers(help='plugins help') - - subparser_plugins_create(sp) - subparser_plugins_delete(sp) - subparser_plugins_get(sp) - subparser_plugins_get_by_tag(sp) - subparser_plugins_update(sp) - -def subparser_plugins_create(subparser): - sp = subparser.add_parser('create', - help='Create a new plugin', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "blob": "string", - "contexts": [ - { - "type": "string" - } - ], - "description": "string", - "isDraft": true, - "minimumRoleRequired": "VIEWER", - "name": "string", - "proxyTag": "string", - "tag": "string" - } - - Additional documentation - ------------------------ - https://docs.cortex.io/docs/api/create-plugin - ''')) - add_argument_file(sp, 'File containing JSON-formatted body of plugin definition') - sp.set_defaults(func=plugins_create) - -def plugins_create(args): - headers = default_headers() - post("/api/v1/plugins", headers, 
payload=read_file(args)) - -def subparser_plugins_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a plugin by tag') - add_argument_tag(sp, 'The tag of the plugin.') - sp.set_defaults(func=plugins_delete) - -def plugins_delete(args): - delete("/api/v1/plugins/" + args.tag, expected_rc=204) - -def subparser_plugins_get(subparser): - sp = subparser.add_parser('get', help='Get all plugins, excluding drafts') - add_argument_includeDrafts(sp) - sp.set_defaults(func=plugins_get) - -def plugins_get(args): - get("/api/v1/plugins" + parse_opts(args)) - -def subparser_plugins_get_by_tag(subparser): - sp = subparser.add_parser('get-by-tag', help='Retrieve the metadata of a plugin by tag') - add_argument_tag(sp, 'The tag of the plugin.') - sp.set_defaults(func=plugins_get_by_tag) - -def plugins_get_by_tag(args): - get("/api/v1/plugins/" + args.tag) - -def subparser_plugins_update(subparser): - sp = subparser.add_parser('update', - help='Create a new plugin', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "blob": "string", - "contexts": [ - { - "type": "string" - } - ], - "description": "string", - "isDraft": true, - "minimumRoleRequired": "VIEWER", - "name": "string", - "proxyTag": "string", - } - - Additional documentation - ------------------------ - https://docs.cortex.io/docs/api/update-plugin - ''')) - add_argument_file(sp, 'File containing JSON-formatted body of plugin definition') - add_argument_tag(sp, 'The tag of the plugin.') - sp.set_defaults(func=plugins_update) - -def plugins_update(args): - headers = default_headers() - put("/api/v1/plugins/" + args.tag, headers, payload=read_file(args)) -# Plugins end - -# Queries start -def subparser_queries_opts(subparsers): - p = subparsers.add_parser('queries', help='run CQL queries') - sp = p.add_subparsers(help='queries help') - - subparser_queries_run(sp) - 
subparser_queries_get(sp) - -def subparser_queries_run(subparser): - sp = subparser.add_parser('run', help='Run CQL query', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Query input can be provided either as a JSON-formatted file or in a file - containing the query text that would be used in Query Builder in the Cortex UI. - In the latter case, the text will be converted into the expected JSON format - and then sent to the API. - - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "query": "tag = \"test-service\" and custom(\"testField\") != null" - } - - Same query as above as CQL input: - --------------------------------- - tag = "test-service" and custom("testField") != null - - The --wait and --timeout parameters are optional. If they are not passed - as parameters, the CLI returns a JSON response containing the name of the - jobId. - - Subsequent calls to "queries get" can be made to check for completion status. - - With the wait parameter set, the CLI will make the calls to "queries get" - and not return a JSON response until the query completes. The default wait - time is 120 seconds. If the query does not complete in that time, the command - fails. The wait time can be configured with the --timeout parameter. 
- ''')) - add_argument_file(sp, 'File containing JSON-formatted CQL query') - add_argument_wait(sp, 'Optional; wait for query to complete.') - add_argument_timeout(sp, 'Valid on with -w flag; time in seconds to allow for wait to run') - sp.set_defaults(func=queries_run) - -def queries_run(args): - headers = default_headers() - if hasattr(args, "wait"): - query_output = io.StringIO() - with redirect_stdout(query_output): - delattr(args, 'wait') - queries_run(args) - out = json.loads(query_output.getvalue()) - - jobId = out['jobId'] - sleep_interval = 2 - max_attempts = int(args.timeout) // sleep_interval - args.id = jobId - - done = False - for attempt in range(1, max_attempts): - query_check_output = io.StringIO() - with redirect_stdout(query_check_output): - queries_get(args) - out = json.loads(query_check_output.getvalue()) - status = out['status'] - if status == "DONE": - done = True - break - else: - if attempt == max_attempts: - break - time.sleep(sleep_interval) - - if not done: - print("failed to find job id " + jobId + " in DONE state within " + str(args.timeout) + " seconds") - print(str(out)) - sys.exit(2) - else: - print(str(json.dumps(out))) - else: - # Support input being in JSON format or bare CQL. 
- payload = read_file(args) - if payload[0] != "{": - data = {} - data['query'] = payload - payload = str(json.dumps(data)) - post("/api/v1/queries", headers, payload=payload) - -def subparser_queries_get(subparser): - sp = subparser.add_parser('get', help='Get results of a CQL query') - add_argument_id(sp) - sp.set_defaults(func=queries_get) - -def queries_get(args): - get("/api/v1/queries/" + args.id) -# Queries end - -# Resource Definitions start -def subparser_resource_definitions_opts(subparsers): - p = subparsers.add_parser('resource-definitions', help='resource definitions') - sp = p.add_subparsers(help='resource_definitions help') - - subparser_resource_definitions_create(sp) - subparser_resource_definitions_list(sp) - subparser_resource_definitions_delete(sp) - subparser_resource_definitions_retrieve(sp) - subparser_resource_definitions_update(sp) - -def subparser_resource_definitions_create(subparser): - sp = subparser.add_parser('create', - help='Create definition', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "description": "string", - "name": "string", - "schema": {}, - "type": "string" - } - - Examples: - --------- - Custom resource without any additional schema: - { - "description": "Open Policy Definitions", - "name": "OPA Policy", - "schema": {}, - "type": "opa" - } - - Custom resource with a defined schema: - { - "description": "Resource to capture and catalog basic attributes of a CI/CD system", - "name": "CI/CD Tooling", - "schema": { - { - "required": [ - "version", - "vendor" - ], - "properties": { - "version": { - "type": "string" - }, - "vendor": { - "type": "string" - } - } - } - "type": "ci-cd" - } - - - Additional documentation - ------------------------ - https://docs.cortex.io/docs/reference/basics/resource-catalog - https://docs.cortex.io/docs/api/create-definition - - Related commands - ---------------- 
- cortex resource - CLI command to add, update and delete resources. - ''')) - add_argument_file(sp, 'File containing JSON-formatted resource definition') - sp.set_defaults(func=resource_definitions_create) - -def resource_definitions_create(args): - headers = default_headers() - post("/api/v1/catalog/definitions", headers, read_file(args)) - -def subparser_resource_definitions_list(subparser): - sp = subparser.add_parser('list', help='List definition') - sp.set_defaults(func=resource_definitions_list) - -def resource_definitions_list(args): - get("/api/v1/catalog/definitions") - -def subparser_resource_definitions_delete(subparser): - sp = subparser.add_parser('delete', help='Delete definition') - add_argument_type(sp) - sp.set_defaults(func=resource_definitions_delete) - -def resource_definitions_delete(args): - delete("/api/v1/catalog/definitions/" + args.type) - -def subparser_resource_definitions_retrieve(subparser): - sp = subparser.add_parser('get', help='Get definition') - add_argument_type(sp) - sp.set_defaults(func=resource_definitions_retrieve) - -def resource_definitions_retrieve(args): - get("/api/v1/catalog/definitions/" + args.type) - -def subparser_resource_definitions_update(subparser): - sp = subparser.add_parser('update', help='Update definition') - add_argument_type(sp) - add_argument_file(sp, 'File containing updated JSON schema for resource definition') - add_argument_force(sp) - sp.set_defaults(func=resource_definitions_update) - -def resource_definitions_update(args): - headers = default_headers() - put("/api/v1/catalog/definitions/" + args.type + parse_opts(args), headers, payload=read_file(args)) -# Resource Definitions end - -# Scorecards start -def subparser_scorecards_opts(subparsers): - p = subparsers.add_parser('scorecards', help='scorecards API requests') - sp = p.add_subparsers(help='scorecards help') - - subparser_scorecards_create_or_update(sp) - subparser_scorecards_delete(sp) - subparser_scorecards_list(sp) - 
subparser_scorecards_shields_io_badge(sp) - subparser_scorecards_get(sp) - subparser_scorecards_descriptor(sp) - subparser_scorecards_next_steps(sp) - subparser_scorecards_scores(sp) - -def subparser_scorecards_create_or_update(subparser): - sp = subparser.add_parser('create', help='Create definition') - add_argument_file(sp, 'File containing openapi descriptor for scorecard') - sp.set_defaults(func=scorecards_create_or_update) - -def scorecards_create_or_update(args): - headers = default_headers('application/yaml') - post("/api/v1/scorecards/descriptor", headers, read_file(args)) - -def subparser_scorecards_delete(subparser): - sp = subparser.add_parser('delete', help='Delete scorecard') - add_argument_tag(sp) - sp.set_defaults(func=scorecards_delete) - -def scorecards_delete(args): - delete("/api/v1/scorecards/" + args.tag) - -def subparser_scorecards_list(subparser): - sp = subparser.add_parser('list', help='List scorecards') - add_argument_show_drafts(sp) - sp.set_defaults(func=scorecards_list) - -def scorecards_list(args): - get("/api/v1/scorecards" + parse_opts(args)) - -def subparser_scorecards_shields_io_badge(subparser): - sp = subparser.add_parser('shield', help='Retrieve scorecard shields.io badge') - add_argument_scorecard_tag(sp) - add_argument_tag(sp) - sp.set_defaults(func=scorecards_shields_io_badge) - -def scorecards_shields_io_badge(args): - get("/api/v1/scorecards/" + args.scorecardTag + "/entity/" + args.tag + "/badge") - -def subparser_scorecards_get(subparser): - sp = subparser.add_parser('get', help='Retrieve scorecard') - add_argument_tag(sp, 'Unique tag for the Scorecard') - sp.set_defaults(func=scorecards_get) - -def scorecards_get(args): - get("/api/v1/scorecards/" + args.tag) - -def subparser_scorecards_descriptor(subparser): - sp = subparser.add_parser('descriptor', help='Retrieve scorecard descriptor') - add_argument_tag(sp) - sp.set_defaults(func=scorecards_descriptor) - -def scorecards_descriptor(args): - get("/api/v1/scorecards/" + 
args.tag + "/descriptor") - -def subparser_scorecards_next_steps(subparser): - sp = subparser.add_parser('next-steps', help='Retrieve next steps for entity in scorecard') - add_argument_tag(sp, 'Unique tag for the scorecard') - add_argument_entity_tag(sp) - sp.set_defaults(func=scorecards_next_steps) - -def scorecards_next_steps(args): - get("/api/v1/scorecards/" + args.tag + "/next-steps" + parse_opts(args)) - -def subparser_scorecards_scores(subparser): - sp = subparser.add_parser('scores', help='Return latest scores for all entities in the Scorecard') - add_argument_tag(sp, 'Unique tag for the scorecard') - add_argument_entity_tag(sp, required=False) - sp.set_defaults(func=scorecards_scores) - -def scorecards_scores(args): - get("/api/v1/scorecards/" + args.tag + "/scores" + parse_opts(args, ['tag'])) -# Scorecards end - -# Teams Hierarchies start -def subparser_teams_hierarchies_opts(subparsers): - p = subparsers.add_parser('teams-hierarchies', help='commands to create and modify team hierarchies') - sp = p.add_subparsers(help='teams hierarchies help') - - subparser_teams_hierarchies_create(sp) - subparser_teams_hierarchies_get(sp) - subparser_teams_hierarchies_delete(sp) - subparser_teams_hierarchies_relationships(sp) - -def subparser_teams_hierarchies_create(subparser): - sp = subparser.add_parser('create', - help='Create a department', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Format of JSON-formatted configuration file: - -------------------------------------------- - { - "departmentTag": "string", - "description": "string", - "members": [ - { - "description": "string", - "email": "string", - "name": "string" - } - ], - "name": "string" - } - ''')) - add_argument_file(sp, 'file containing JSON-formatted content for new department') - sp.set_defaults(func=teams_hierarchies_create) - -def teams_hierarchies_create(args): - headers = default_headers() - post("/api/v1/teams/departments", headers, read_file(args)) - -def 
subparser_teams_hierarchies_get(subparser): - sp = subparser.add_parser('get', help='Get department details') - add_argument_departmentTag(sp) - sp.set_defaults(func=teams_hierarchies_get) - -def teams_hierarchies_get(args): - get("/api/v1/teams/departments/" + parse_opts(args)) - -def subparser_teams_hierarchies_delete(subparser): - sp = subparser.add_parser('delete', help='Delete a department') - add_argument_departmentTag(sp) - sp.set_defaults(func=teams_hierarchies_delete) - -def teams_hierarchies_delete(args): - delete("/api/v1/teams/departments/" + parse_opts(args)) - -def subparser_teams_hierarchies_relationships(subparser): - sp = subparser.add_parser('relationships', help='Get team relationships') - sp.set_defaults(func=teams_hierarchies_relationships) - -def teams_hierarchies_relationships(args): - get("/api/v1/teams/relationships") -# Teams hierarchies end - -# Teams start -def subparser_teams_opts(subparsers): - p = subparsers.add_parser('teams', help='commands to create and modify teams') - sp = p.add_subparsers(help='team help') - - subparser_teams_create(sp) - subparser_teams_get(sp) - subparser_teams_list(sp) - subparser_teams_delete(sp) - subparser_teams_archive(sp) - subparser_teams_unarchive(sp) - subparser_teams_update_metadata(sp) - subparser_teams_update_members(sp) - -def subparser_teams_create(subparser): - sp = subparser.add_parser('create', help='Create team') - add_argument_file(sp, 'file containing team openapi definition') - sp.set_defaults(func=teams_create) - -def teams_create(args): - headers = default_headers() - post("/api/v1/teams", headers, read_file(args)) - -def subparser_teams_get(subparser): - sp = subparser.add_parser('get', help='Get team details') - add_argument_teamTag(sp) - sp.set_defaults(func=teams_get) - -def teams_get(args): - get("/api/v1/teams/" + args.teamTag) - -def subparser_teams_list(subparser): - sp = subparser.add_parser('list', help='List teams') - add_argument_includeTeamsWithoutMembers(sp) - 
sp.set_defaults(func=teams_list) - -def teams_list(args): - get("/api/v1/teams" + parse_opts(args)) - -def subparser_teams_delete(subparser): - sp = subparser.add_parser('delete', help='Delete team') - add_argument_teamTag(sp, help_text="Name of team") - sp.set_defaults(func=teams_delete) - -def teams_delete(args): - delete("/api/v1/teams" + parse_opts(args), expected_rc=204) - -def subparser_teams_archive(subparser): - sp = subparser.add_parser('archive', help='Archive team') - add_argument_tag(sp, help_text="Name of team") - sp.set_defaults(func=teams_archive) - -def teams_archive(args): - put("/api/v1/teams/" + args.tag + "/archive") - -def subparser_teams_unarchive(subparser): - sp = subparser.add_parser('unarchive', help='Unarchive team') - add_argument_tag(sp, help_text="Name of team") - sp.set_defaults(func=teams_unarchive) - -def teams_unarchive(args): - put("/api/v1/teams/" + args.tag + "/unarchive") - -def subparser_teams_update_metadata(subparser): - sp = subparser.add_parser('update-metadata', help='Update team metadata') - add_argument_teamTag(sp) - add_argument_file(sp, 'JSON file containing team metadata updates') - sp.set_defaults(func=teams_update_metadata) - -def teams_update_metadata(args): - headers = default_headers() - put("/api/v1/teams/" + args.teamTag, headers, read_file(args)) - -def subparser_teams_update_members(subparser): - sp = subparser.add_parser('update-members', help='[Cortex-managed teams] Update team members') - add_argument_teamTag(sp) - add_argument_file(sp, 'JSON file containing team member updates') - sp.set_defaults(func=teams_update_members) - -def teams_update_members(args): - headers = default_headers() - post("/api/v1/teams" + parse_opts(args) + "/members", headers, read_file(args)) -# Teams end - -# The default input to parser.parse_args is sys.argv[1:], but we are passing args -# to cli (to facilitate simpler testing in pytest), so we need to set the default -# if args comes in from sys.argv. 
-def cli(argv=sys.argv[1:]): - parser = argparse.ArgumentParser( - prog='cortex CLI', - description='Cortex command line interface', - formatter_class=argparse.RawTextHelpFormatter, - epilog=textwrap.dedent('''\ - Type 'man cortex' for additional details. - ''')) - parser.add_argument('-c', '--config', help='Config location, default = ~/.cortex/config', default=os.path.expanduser('~') + '/.cortex/config') - parser.add_argument('-d', '--debug', help='Writes request debug information as JSON to stderr', action='store_true') - parser.add_argument('-n', '--noObfuscate', help='Do not obfuscate bearer token when debugging', action='store_true') - parser.add_argument('-q', '--quiet', help='Suppress warning messages when overriding tenant settings with environment variables', action='store_true') - parser.add_argument('-t', '--tenant', default='default', help='tenant name defined in ~/.cortex/config, defaults to \'default\'', metavar='') - parser.add_argument('-v', '--version', action='version', version=version()) - sp = parser.add_subparsers(help='sub-command help') - - subparser_audit_logs_opts(sp) - subparser_backup_opts(sp) - subparser_catalog_opts(sp) - subparser_custom_data_opts(sp) - subparser_custom_events_opts(sp) - subparser_dependencies_opts(sp) - subparser_deploys_opts(sp) - subparser_discovery_audit_opts(sp) - subparser_docs_opts(sp) - subparser_gitops_logs_opts(sp) - subparser_groups_opts(sp) - subparser_integrations_opts(sp) - subparser_ip_allowlist_opts(sp) - subparser_on_call_opts(sp) - subparser_packages_opts(sp) - subparser_plugins_opts(sp) - subparser_queries_opts(sp) - subparser_resource_definitions_opts(sp) - subparser_scorecards_opts(sp) - subparser_teams_hierarchies_opts(sp) - subparser_teams_opts(sp) - - replace_string = "REPLACE_WITH_YOUR_CORTEX_API_KEY" - validate_input(argv, parser) - args = parser.parse_args(argv) - # args = get_config(config, args, argv, parser, replace_string) - get_config(config, args, argv, parser, replace_string) - 
args.func(args) - -if __name__ == '__main__': - sys.exit(cli()) diff --git a/cortexapps_cli/cortex_client.py b/cortexapps_cli/cortex_client.py index b7e1ed8..b84eafe 100644 --- a/cortexapps_cli/cortex_client.py +++ b/cortexapps_cli/cortex_client.py @@ -54,6 +54,7 @@ def request(self, method, endpoint, params={}, headers={}, data=None, raw_body=F except json.JSONDecodeError: # if we can't parse the error message, just raise the HTTP error response.raise_for_status() + if raw_response: return response diff --git a/cortexapps_cli/utils.py b/cortexapps_cli/utils.py index 69ee6bd..65b0ec5 100644 --- a/cortexapps_cli/utils.py +++ b/cortexapps_cli/utils.py @@ -104,7 +104,7 @@ def humanize_value(value): return json.dumps(value, indent=2) return str(value) -def print_output(data, columns=None, filters=None, sort=None, output_format='json'): +def print_output(data, columns=None, filters=None, sort=None, output_format='json', no_headers=False): """ Print output in the specified format. @@ -113,6 +113,7 @@ def print_output(data, columns=None, filters=None, sort=None, output_format='jso columns: A list of columns to include in the output. filters: A list of filters to apply to the data. output_format: The format to print the data in. 
+ no_headers: if column headers should not be shown """ if output_format is None: @@ -178,8 +179,9 @@ def print_output(data, columns=None, filters=None, sort=None, output_format='jso console = Console() console.print(table) elif output_format == 'csv': - csv_writer = csv.writer(sys.stdout) - csv_writer.writerow(column_headers) + csv_writer = csv.writer(sys.stdout, lineterminator='\n') + if not no_headers: + csv_writer.writerow(column_headers) csv_writer.writerows(rows) def print_output_with_context(ctx: typer.Context, data): @@ -188,6 +190,7 @@ def print_output_with_context(ctx: typer.Context, data): sort = ctx.params.get('sort', None) table_output = ctx.params.get('table_output', None) csv_output = ctx.params.get('csv_output', None) + no_headers = ctx.params.get('no_headers', None) if table_output and csv_output: raise typer.BadParameter("Only one of --table and --csv can be specified") if table_output: @@ -196,4 +199,4 @@ def print_output_with_context(ctx: typer.Context, data): output_format = 'csv' else: output_format = 'json' - print_output(data, columns=columns, filters=filters, sort=sort, output_format=output_format) + print_output(data, columns=columns, filters=filters, sort=sort, output_format=output_format, no_headers=no_headers) diff --git a/data/run-time/archive-entity.yaml b/data/import/catalog/cli-test-archive-entity.yaml similarity index 82% rename from data/run-time/archive-entity.yaml rename to data/import/catalog/cli-test-archive-entity.yaml index c35a7f8..7bc8b79 100644 --- a/data/run-time/archive-entity.yaml +++ b/data/import/catalog/cli-test-archive-entity.yaml @@ -2,7 +2,7 @@ openapi: 3.0.0 info: title: Archive Entity description: Entity that will be created and then archived to test catalog archive entity - x-cortex-tag: archive-entity + x-cortex-tag: cli-test-archive-entity x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/create-entity.yaml b/data/import/catalog/cli-test-create-entity.yaml similarity index 81% 
rename from data/run-time/create-entity.yaml rename to data/import/catalog/cli-test-create-entity.yaml index 38c7427..5468d0f 100644 --- a/data/run-time/create-entity.yaml +++ b/data/import/catalog/cli-test-create-entity.yaml @@ -2,7 +2,7 @@ openapi: 3.0.0 info: title: Create Entity description: Entity that will be created to test catalog create entity - x-cortex-tag: create-entity + x-cortex-tag: cli-test-create-entity x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/delete-entity.yaml b/data/import/catalog/cli-test-delete-entity.yaml similarity index 83% rename from data/run-time/delete-entity.yaml rename to data/import/catalog/cli-test-delete-entity.yaml index 088138c..a849a4a 100644 --- a/data/run-time/delete-entity.yaml +++ b/data/import/catalog/cli-test-delete-entity.yaml @@ -2,7 +2,7 @@ openapi: 3.0.0 info: title: Delete Entity description: Entity that will be created and then deleted to test catalog delete entity - x-cortex-tag: delete-entity + x-cortex-tag: cli-test-delete-entity x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/docs-entity.yaml b/data/import/catalog/cli-test-docs-entity.yaml similarity index 82% rename from data/run-time/docs-entity.yaml rename to data/import/catalog/cli-test-docs-entity.yaml index 5b9805f..6c4b89d 100644 --- a/data/run-time/docs-entity.yaml +++ b/data/import/catalog/cli-test-docs-entity.yaml @@ -2,7 +2,7 @@ openapi: 3.0.0 info: title: Docs Entity description: Entity that will be created and then updated with OpenAPI docs - x-cortex-tag: docs-entity + x-cortex-tag: cli-test-docs-entity x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/test-domain-child.yaml b/data/import/catalog/cli-test-domain-child.yaml similarity index 66% rename from data/run-time/test-domain-child.yaml rename to data/import/catalog/cli-test-domain-child.yaml index 4977f1e..4b5dd26 100644 --- a/data/run-time/test-domain-child.yaml +++ 
b/data/import/catalog/cli-test-domain-child.yaml @@ -1,9 +1,9 @@ openapi: 3.0.0 info: title: Test Domain Child - x-cortex-tag: test-domain-child + x-cortex-tag: cli-test-domain-child x-cortex-type: domain x-cortex-groups: - cli-test x-cortex-children: - - tag: test-service + - tag: cli-test-service diff --git a/data/run-time/test-domain-parent.yaml b/data/import/catalog/cli-test-domain-parent.yaml similarity index 64% rename from data/run-time/test-domain-parent.yaml rename to data/import/catalog/cli-test-domain-parent.yaml index 4fdd0f2..188306c 100644 --- a/data/run-time/test-domain-parent.yaml +++ b/data/import/catalog/cli-test-domain-parent.yaml @@ -1,9 +1,9 @@ openapi: 3.0.0 info: title: Test Domain Parent - x-cortex-tag: test-domain-parent + x-cortex-tag: cli-test-domain-parent x-cortex-type: domain x-cortex-groups: - cli-test x-cortex-children: - - tag: test-domain-child + - tag: cli-test-domain-child diff --git a/data/run-time/groups-entity.yaml b/data/import/catalog/cli-test-groups-entity.yaml similarity index 81% rename from data/run-time/groups-entity.yaml rename to data/import/catalog/cli-test-groups-entity.yaml index 6cddbcd..5e33198 100644 --- a/data/run-time/groups-entity.yaml +++ b/data/import/catalog/cli-test-groups-entity.yaml @@ -2,7 +2,7 @@ openapi: 3.0.0 info: title: Groups Entity description: Entity that will be created and then used in Groups tests - x-cortex-tag: groups-entity + x-cortex-tag: cli-test-groups-entity x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/test-service-callee.yaml b/data/import/catalog/cli-test-service-callee.yaml similarity index 99% rename from data/run-time/test-service-callee.yaml rename to data/import/catalog/cli-test-service-callee.yaml index 1481f04..a23b6c5 100644 --- a/data/run-time/test-service-callee.yaml +++ b/data/import/catalog/cli-test-service-callee.yaml @@ -1,10 +1,10 @@ openapi: 3.0.1 info: title: Test Service Callee - x-cortex-tag: test-service-callee + x-cortex-tag: 
cli-test-service-callee x-cortex-type: service x-cortex-groups: - - cli-api-test + - cli-test servers: - url: https://api.getcortexapp.com description: Cortex Cloud API host diff --git a/data/run-time/test-service-caller.yaml b/data/import/catalog/cli-test-service-caller.yaml similarity index 73% rename from data/run-time/test-service-caller.yaml rename to data/import/catalog/cli-test-service-caller.yaml index 2b27f1c..88dae5a 100644 --- a/data/run-time/test-service-caller.yaml +++ b/data/import/catalog/cli-test-service-caller.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Service Caller - x-cortex-tag: test-service-caller + x-cortex-tag: cli-test-service-caller x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/test-service-group-1.yaml b/data/import/catalog/cli-test-service-group-1.yaml similarity index 75% rename from data/run-time/test-service-group-1.yaml rename to data/import/catalog/cli-test-service-group-1.yaml index 084568a..eee671f 100644 --- a/data/run-time/test-service-group-1.yaml +++ b/data/import/catalog/cli-test-service-group-1.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Service Group 1 - x-cortex-tag: test-service-group-1 + x-cortex-tag: cli-test-service-group-1 x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/test-service-group-2.yaml b/data/import/catalog/cli-test-service-group-2.yaml similarity index 75% rename from data/run-time/test-service-group-2.yaml rename to data/import/catalog/cli-test-service-group-2.yaml index 7649fa9..ff8d853 100644 --- a/data/run-time/test-service-group-2.yaml +++ b/data/import/catalog/cli-test-service-group-2.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Service Group 2 - x-cortex-tag: test-service-group-2 + x-cortex-tag: cli-test-service-group-2 x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/test-service-links.yaml b/data/import/catalog/cli-test-service-links.yaml similarity index 84% rename from 
data/run-time/test-service-links.yaml rename to data/import/catalog/cli-test-service-links.yaml index 73d84af..664b2e8 100644 --- a/data/run-time/test-service-links.yaml +++ b/data/import/catalog/cli-test-service-links.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Service Links - x-cortex-tag: test-service-links + x-cortex-tag: cli-test-service-links x-cortex-type: service x-cortex-link: - url: https://cortex.io diff --git a/data/run-time/test-service-metadata.yaml b/data/import/catalog/cli-test-service-metadata.yaml similarity index 80% rename from data/run-time/test-service-metadata.yaml rename to data/import/catalog/cli-test-service-metadata.yaml index df0f9b0..a4d78a8 100644 --- a/data/run-time/test-service-metadata.yaml +++ b/data/import/catalog/cli-test-service-metadata.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Service Metadata - x-cortex-tag: test-service-metadata + x-cortex-tag: cli-test-service-metadata x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/test-service-test-team-1.yaml b/data/import/catalog/cli-test-service-test-team-1.yaml similarity index 71% rename from data/run-time/test-service-test-team-1.yaml rename to data/import/catalog/cli-test-service-test-team-1.yaml index e33dfb4..a7df19a 100644 --- a/data/run-time/test-service-test-team-1.yaml +++ b/data/import/catalog/cli-test-service-test-team-1.yaml @@ -1,10 +1,10 @@ openapi: 3.0.1 info: title: Test Service Team 1 - x-cortex-tag: test-service-team-1 + x-cortex-tag: cli-test-service-team-1 x-cortex-type: service x-cortex-owners: - - name: test-team-1 + - name: cli-test-team-1 type: GROUP provider: CORTEX x-cortex-groups: diff --git a/data/run-time/test-service-test-team-2.yaml b/data/import/catalog/cli-test-service-test-team-2.yaml similarity index 70% rename from data/run-time/test-service-test-team-2.yaml rename to data/import/catalog/cli-test-service-test-team-2.yaml index 29cf759..b937107 100644 --- 
a/data/run-time/test-service-test-team-2.yaml +++ b/data/import/catalog/cli-test-service-test-team-2.yaml @@ -1,10 +1,10 @@ openapi: 3.0.1 info: title: Test Service Test Team 2 - x-cortex-tag: test-service-test-team-2 + x-cortex-tag: cli-test-service-test-team-2 x-cortex-type: service x-cortex-owners: - - name: test-team-2 + - name: cli-test-team-2 type: GROUP provider: CORTEX x-cortex-groups: diff --git a/data/run-time/test-service.yaml b/data/import/catalog/cli-test-service.yaml similarity index 86% rename from data/run-time/test-service.yaml rename to data/import/catalog/cli-test-service.yaml index f261543..b726b62 100644 --- a/data/run-time/test-service.yaml +++ b/data/import/catalog/cli-test-service.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Service - x-cortex-tag: test-service + x-cortex-tag: cli-test-service x-cortex-git: github: repository: my-org/my-repo diff --git a/data/run-time/test-team-1.yaml b/data/import/catalog/cli-test-team-1.yaml similarity index 88% rename from data/run-time/test-team-1.yaml rename to data/import/catalog/cli-test-team-1.yaml index aa4fecd..5fcdb2a 100644 --- a/data/run-time/test-team-1.yaml +++ b/data/import/catalog/cli-test-team-1.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Team 1 - x-cortex-tag: test-team-1 + x-cortex-tag: cli-test-team-1 x-cortex-type: team x-cortex-team: members: diff --git a/data/run-time/test-team-2.yaml b/data/import/catalog/cli-test-team-2.yaml similarity index 87% rename from data/run-time/test-team-2.yaml rename to data/import/catalog/cli-test-team-2.yaml index acacce8..15381fe 100644 --- a/data/run-time/test-team-2.yaml +++ b/data/import/catalog/cli-test-team-2.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Team 2 - x-cortex-tag: test-team-2 + x-cortex-tag: cli-test-team-2 x-cortex-type: team x-cortex-team: members: diff --git a/data/run-time/test-team-child.yaml b/data/import/catalog/cli-test-team-child.yaml similarity index 87% rename from 
data/run-time/test-team-child.yaml rename to data/import/catalog/cli-test-team-child.yaml index 78d9727..2d6bd00 100644 --- a/data/run-time/test-team-child.yaml +++ b/data/import/catalog/cli-test-team-child.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Team Child - x-cortex-tag: test-team-child + x-cortex-tag: cli-test-team-child x-cortex-type: team x-cortex-team: members: diff --git a/data/run-time/test-team-parent.yaml b/data/import/catalog/cli-test-team-parent.yaml similarity index 80% rename from data/run-time/test-team-parent.yaml rename to data/import/catalog/cli-test-team-parent.yaml index 15505df..17382ee 100644 --- a/data/run-time/test-team-parent.yaml +++ b/data/import/catalog/cli-test-team-parent.yaml @@ -1,7 +1,7 @@ openapi: 3.0.1 info: title: Test Team Parent - x-cortex-tag: test-team-parent + x-cortex-tag: cli-test-team-parent x-cortex-type: team x-cortex-team: members: @@ -9,6 +9,6 @@ info: name: Richard LaFleur notificationsEnabled: false x-cortex-children: - - tag: test-team-child + - tag: cli-test-team-child x-cortex-groups: - cli-test diff --git a/data/run-time/unarchive-entity.yaml b/data/import/catalog/cli-test-unarchive-entity.yaml similarity index 80% rename from data/run-time/unarchive-entity.yaml rename to data/import/catalog/cli-test-unarchive-entity.yaml index 156e633..bc25539 100644 --- a/data/run-time/unarchive-entity.yaml +++ b/data/import/catalog/cli-test-unarchive-entity.yaml @@ -2,7 +2,7 @@ openapi: 3.0.0 info: title: Unarchive Entity description: Entity that will be created, archived and then un-archived. 
- x-cortex-tag: unarchive-entity + x-cortex-tag: cli-test-unarchive-entity x-cortex-type: service x-cortex-groups: - cli-test diff --git a/data/run-time/create-entity-type-empty-schema.json b/data/import/entity-types/cli-test.json similarity index 77% rename from data/run-time/create-entity-type-empty-schema.json rename to data/import/entity-types/cli-test.json index f0dd863..132a29e 100644 --- a/data/run-time/create-entity-type-empty-schema.json +++ b/data/import/entity-types/cli-test.json @@ -2,5 +2,5 @@ "description": "This is a test entity type definition.", "name": "CLI Test With Empty Schema", "schema": {}, - "type": "cli-test-empty-schema" + "type": "cli-test" } diff --git a/data/import/ip-allowlist/ip-allowlist.json b/data/import/ip-allowlist/ip-allowlist.json new file mode 100644 index 0000000..3770478 --- /dev/null +++ b/data/import/ip-allowlist/ip-allowlist.json @@ -0,0 +1,8 @@ +{ + "entries": [ + { + "address": "0.0.0.0/0", + "description": "all IPv4 rangeP" + } + ] +} diff --git a/tests/test_plugins.json b/data/import/plugins/cli-test-plugin.json similarity index 85% rename from tests/test_plugins.json rename to data/import/plugins/cli-test-plugin.json index cbb98af..2078298 100644 --- a/tests/test_plugins.json +++ b/data/import/plugins/cli-test-plugin.json @@ -14,6 +14,6 @@ "description": "Simple Plugin", "isDraft": false, "minimumRoleRequired": "VIEWER", - "name": "My Test Plugin", - "tag": "my-test-plugin" + "name": "CLI Test Plugin", + "tag": "cli-test-plugin" } diff --git a/tests/test_scorecards_draft.yaml b/data/import/scorecards/cli-test-draft-scorecard.yaml similarity index 87% rename from tests/test_scorecards_draft.yaml rename to data/import/scorecards/cli-test-draft-scorecard.yaml index c16e9c1..72f4e86 100644 --- a/tests/test_scorecards_draft.yaml +++ b/data/import/scorecards/cli-test-draft-scorecard.yaml @@ -1,5 +1,5 @@ -tag: test-scorecard-draft -name: Test Scorecard Draft +tag: cli-test-draft-scorecard +name: CLI Test Draft Scorecard 
description: Used to test Cortex CLI draft: true ladder: diff --git a/tests/test_scorecards.yaml b/data/import/scorecards/cli-test-scorecard.yaml similarity index 83% rename from tests/test_scorecards.yaml rename to data/import/scorecards/cli-test-scorecard.yaml index 1edc99d..33beaed 100644 --- a/tests/test_scorecards.yaml +++ b/data/import/scorecards/cli-test-scorecard.yaml @@ -1,5 +1,5 @@ -tag: test-scorecard -name: Test Scorecard +tag: cli-test-scorecard +name: CLI Test Scorecard description: Used to test Cortex CLI draft: false ladder: @@ -23,5 +23,5 @@ rules: filter: category: SERVICE filter: - query: 'entity.tag() == "test-service"' + query: 'entity.tag() == "cli-test-service"' category: SERVICE diff --git a/data/import/workflows/cli-test-workflow.yaml b/data/import/workflows/cli-test-workflow.yaml new file mode 100644 index 0000000..7389049 --- /dev/null +++ b/data/import/workflows/cli-test-workflow.yaml @@ -0,0 +1,20 @@ +name: CLI Test Workflowe +tag: cli-test-workflow +description: My test workflow. 
+isDraft: true +filter: + type: GLOBAL +runResponseTemplate: null +actions: +- name: Send message + slug: send-message + schema: + channel: air-force + message: "Workflow submitted by: {{context.initiatedBy.name}}\nRoles: \n{{#context.initiatedBy.roles}}\n\ + role name: {{name}}\n{{/context.initiatedBy.roles}}" + type: SLACK + outgoingActions: [] + isRootAction: true +runRestrictionPolicies: [] +iconTag: null + diff --git a/data/run-time/custom-data-bulk.json b/data/run-time/custom-data-bulk.json index a50d6f4..3f3056a 100644 --- a/data/run-time/custom-data-bulk.json +++ b/data/run-time/custom-data-bulk.json @@ -1,6 +1,6 @@ { "values": { - "test-service-caller": [ + "cli-test-service-caller": [ { "key": "bulk-key-1", "value": "value-1" @@ -10,7 +10,7 @@ "value": "value-2" } ], - "test-service-callee": [ + "cli-test-service-callee": [ { "key": "bulk-key-3", "value": "value-3" diff --git a/data/run-time/dependencies-bulk.json b/data/run-time/dependencies-bulk.json index ccde592..fe0d250 100644 --- a/data/run-time/dependencies-bulk.json +++ b/data/run-time/dependencies-bulk.json @@ -1,6 +1,6 @@ { "values": { - "test-service-caller": [ + "cli-test-service-caller": [ { "description": "dependency description", "metadata": { @@ -9,7 +9,7 @@ }, "method": "GET", "path": "/api/v1/github/configurations", - "tag": "test-service-callee" + "tag": "cli-test-service-callee" } ] } diff --git a/data/run-time/update-entity-type-empty-schema.json b/data/run-time/entity-type-update.json similarity index 100% rename from data/run-time/update-entity-type-empty-schema.json rename to data/run-time/entity-type-update.json diff --git a/data/run-time/scorecard.yaml b/data/run-time/scorecard.yaml deleted file mode 100644 index 7345cf7..0000000 --- a/data/run-time/scorecard.yaml +++ /dev/null @@ -1,21 +0,0 @@ -tag: cli-test-scorecard -name: CLI Test Scorecard -description: Used to test Cortex public API -draft: false -ladder: - name: Default Ladder - levels: - - name: Gold - rank: 1 - 
description: Gold tier - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("hasSecurityScans") != null - weight: 1 - level: Gold - filter: - category: SERVICE -filter: - query: 'entity.tag() == "test-service"' - category: SERVICE diff --git a/data/run-time/scorecard_drafts.yaml b/data/run-time/scorecard_drafts.yaml deleted file mode 100644 index 5a8d558..0000000 --- a/data/run-time/scorecard_drafts.yaml +++ /dev/null @@ -1,21 +0,0 @@ -tag: cli-test-draft-scorecard -name: CLI Test Draft Scorecard -description: Used to test Scorecard drafts with Cortex public API -draft: true -ladder: - name: Default Ladder - levels: - - name: Gold - rank: 1 - description: Gold tier - color: 7cf376 -rules: -- title: Has Custom Data - expression: custom("hasSecurityScans") != null - weight: 1 - level: Gold - filter: - category: SERVICE -filter: - query: 'entity.tag() == "test-service"' - category: SERVICE diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py index 981d309..061da45 100644 --- a/tests/helpers/utils.py +++ b/tests/helpers/utils.py @@ -42,5 +42,3 @@ def cli(params, return_type=ReturnType.JSON): return result case ReturnType.STDOUT: return result.stdout - case ReturnType.STDERR: - return result.stderr diff --git a/tests/test_catalog_archive_entity.py b/tests/test_catalog_archive_entity.py index 8f81f9b..c6aa7da 100644 --- a/tests/test_catalog_archive_entity.py +++ b/tests/test_catalog_archive_entity.py @@ -1,7 +1,6 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/archive-entity.yaml"]) cli(["catalog", "archive", "-t", "archive-entity"]) response = cli(["catalog", "details", "-t", "archive-entity"]) diff --git a/tests/test_catalog_create_entity.py b/tests/test_catalog_create_entity.py index a7a1bb8..fd2f8dd 100644 --- a/tests/test_catalog_create_entity.py +++ b/tests/test_catalog_create_entity.py @@ -1,7 +1,7 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", 
"data/run-time/test-service.yaml"]) + cli(["catalog", "create", "-f", "data/import/catalog/cli-test-service.yaml"]) - response = cli(["catalog", "descriptor", "-t", "test-service"]) - assert response['info']['x-cortex-tag'] == "test-service" + response = cli(["catalog", "descriptor", "-t", "cli-test-service"]) + assert response['info']['x-cortex-tag'] == "cli-test-service" diff --git a/tests/test_catalog_create_entity_viewer.py b/tests/test_catalog_create_entity_viewer.py index db6db01..70c4662 100644 --- a/tests/test_catalog_create_entity_viewer.py +++ b/tests/test_catalog_create_entity_viewer.py @@ -3,6 +3,6 @@ # Using a key with viewer role should be Forbidden. @mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY_VIEWER']}) def test(capsys): - response = cli(["catalog", "create", "-f", "data/run-time/create-entity.yaml"], ReturnType.RAW) + response = cli(["catalog", "create", "-f", "data/import/catalog/cli-test-create-entity.yaml"], ReturnType.RAW) assert "HTTP Error 403:" in response.stdout, "command fails with 403 error" diff --git a/tests/test_catalog_delete_entity.py b/tests/test_catalog_delete_entity.py index 3f51d2c..8944d07 100644 --- a/tests/test_catalog_delete_entity.py +++ b/tests/test_catalog_delete_entity.py @@ -1,13 +1,12 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/delete-entity.yaml"]) - response = cli(["catalog", "details", "-t", "delete-entity"]) - assert response['tag'] == 'delete-entity', "Should find newly created entity" + response = cli(["catalog", "details", "-t", "cli-test-delete-entity"]) + assert response['tag'] == 'cli-test-delete-entity', "Should find newly created entity" - cli(["catalog", "delete", "-t", "delete-entity"]) + cli(["catalog", "delete", "-t", "cli-test-delete-entity"]) # Since entity is deleted, cli command should exit with a Not Found, 404 error. 
- response = cli(["catalog", "details", "-t", "delete-entity"], ReturnType.RAW) + response = cli(["catalog", "details", "-t", "cli-test-delete-entity"], ReturnType.RAW) assert "HTTP Error 404:" in response.stdout, "command fails with 403 error" diff --git a/tests/test_catalog_get_entity_details.py b/tests/test_catalog_get_entity_details.py index 534b226..0d261de 100644 --- a/tests/test_catalog_get_entity_details.py +++ b/tests/test_catalog_get_entity_details.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - - response = cli( ["catalog", "details", "-t", "test-service"]) - assert response['tag'] == 'test-service', "Entity details should be returned" + response = cli( ["catalog", "details", "-t", "cli-test-service"]) + assert response['tag'] == 'cli-test-service', "Entity details should be returned" diff --git a/tests/test_catalog_get_entity_details_hierarchy.py b/tests/test_catalog_get_entity_details_hierarchy.py index 4fda28e..a487680 100644 --- a/tests/test_catalog_get_entity_details_hierarchy.py +++ b/tests/test_catalog_get_entity_details_hierarchy.py @@ -1,9 +1,6 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-domain-parent.yaml"]) - cli(["catalog", "create", "-f", "data/run-time/test-domain-child.yaml"]) - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - response = cli(["catalog", "details", "-i", "groups", "-t", "test-service"]) + response = cli(["catalog", "details", "-i", "groups", "-t", "cli-test-service"]) assert response['hierarchy']['parents'][0]['groups'][0] == 'cli-test', "Entity groups should be in response" assert response['hierarchy']['parents'][0]['parents'][0]['groups'][0] == 'cli-test', "Parent groups should be in response" diff --git a/tests/test_catalog_include_links.py b/tests/test_catalog_include_links.py index c998c0c..24c08ab 100644 --- a/tests/test_catalog_include_links.py +++ 
b/tests/test_catalog_include_links.py @@ -1,8 +1,6 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "create", "-f", "data/run-time/test-service-links.yaml"]) - response = cli(["catalog", "list", "-g", "include-links-test"]) assert (len(response['entities'][0]['links']) == 0) response = cli(["catalog", "list", "-g", "include-links-test", "-l"]) diff --git a/tests/test_catalog_include_metadata.py b/tests/test_catalog_include_metadata.py index 6bbac08..6a70e9a 100644 --- a/tests/test_catalog_include_metadata.py +++ b/tests/test_catalog_include_metadata.py @@ -1,8 +1,6 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service-metadata.yaml"]) - response = cli(["catalog", "list", "-g", "include-metadata-test"]) assert (len(response['entities'][0]['metadata']) == 0) diff --git a/tests/test_catalog_include_nested_fields.py b/tests/test_catalog_include_nested_fields.py index 71349ef..1702e9f 100644 --- a/tests/test_catalog_include_nested_fields.py +++ b/tests/test_catalog_include_nested_fields.py @@ -1,9 +1,14 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "create", "-f", "data/run-time/test-team-1.yaml"]) + response = cli(["catalog", "list", "-t", "team"], ReturnType.STDOUT) + + if "HTTP Error 400: Bad Request - Cannot request teams." in response: + print("This test requires feature flag ALLOW_TEAM_ENTITIES_IN_CATALOG_API, which does not appear to be set, so not running test.") + print("This flag will eventually be set for all workspaces and this check can be removed. 
However, as of June 2025 this has not been done.") + return response = cli(["catalog", "list", "-g", "cli-test", "-io", "-in", "team:members"]) - list = [entity for entity in response['entities'] if entity['tag'] == "test-team-1"] + list = [entity for entity in response['entities'] if entity['tag'] == "cli-test-team-1"] assert not list == None, "found an entity in response" assert len(list[0]['members']) > 0, "response has non-empty array of members" diff --git a/tests/test_catalog_list_by_group_multiple.py b/tests/test_catalog_list_by_group_multiple.py index 192a609..0cce1ad 100644 --- a/tests/test_catalog_list_by_group_multiple.py +++ b/tests/test_catalog_list_by_group_multiple.py @@ -1,8 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service-group-1.yaml"]) - cli(["catalog", "create", "-f", "data/run-time/test-service-group-2.yaml"]) - response = cli(["catalog", "list", "-g", "cli-test-group-1,cli-test-group-2"]) assert (response['total'] == 2) diff --git a/tests/test_catalog_list_by_group_single.py b/tests/test_catalog_list_by_group_single.py index 811bdcd..1fa785b 100644 --- a/tests/test_catalog_list_by_group_single.py +++ b/tests/test_catalog_list_by_group_single.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service-group-1.yaml"]) - response = cli(["catalog", "list", "-g", "cli-test-group-1"]) assert (response['total'] == 1) diff --git a/tests/test_catalog_list_by_owners_multiple.py b/tests/test_catalog_list_by_owners_multiple.py index 8add790..efaba14 100644 --- a/tests/test_catalog_list_by_owners_multiple.py +++ b/tests/test_catalog_list_by_owners_multiple.py @@ -1,10 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-team-1.yaml"]) - cli(["catalog", "create", "-f", "data/run-time/test-team-2.yaml"]) - cli(["catalog", "create", "-f", 
"data/run-time/test-service-test-team-1.yaml"]) - cli(["catalog", "create", "-f", "data/run-time/test-service-test-team-2.yaml"]) - - response = cli(["catalog", "list", "-o", "test-team-1,test-team-2"]) + response = cli(["catalog", "list", "-o", "cli-test-team-1,cli-test-team-2"]) assert (response['total'] == 2) diff --git a/tests/test_catalog_list_by_owners_single.py b/tests/test_catalog_list_by_owners_single.py index 6adf2b1..4ba18c2 100644 --- a/tests/test_catalog_list_by_owners_single.py +++ b/tests/test_catalog_list_by_owners_single.py @@ -1,8 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-team-1.yaml"]) - cli(["catalog", "create", "-f", "data/run-time/test-service-test-team-1.yaml"]) - - response = cli(["catalog", "list", "-o", "test-team-1"]) + response = cli(["catalog", "list", "-o", "cli-test-team-1"]) assert (response['total'] == 1) diff --git a/tests/test_catalog_list_by_types.py b/tests/test_catalog_list_by_types.py index c7bed2f..1081ee0 100644 --- a/tests/test_catalog_list_by_types.py +++ b/tests/test_catalog_list_by_types.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - response = cli(["catalog", "list", "-g", "cli-test", "-t", "service"]) assert response['total'] > 0, "Should find at least 1 entity of type 'service'" diff --git a/tests/test_catalog_list_entity_descriptors.py b/tests/test_catalog_list_entity_descriptors.py index 7935690..c4962b0 100644 --- a/tests/test_catalog_list_entity_descriptors.py +++ b/tests/test_catalog_list_entity_descriptors.py @@ -1,9 +1,7 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - response = cli(["catalog", "list-descriptors", "-t", "service"]) - list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "test-service"] + list = [descriptor for descriptor 
in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "cli-test-service"] assert list[0]['info']['x-cortex-groups'][0] == "cli-test" diff --git a/tests/test_catalog_list_entity_descriptors_page.py b/tests/test_catalog_list_entity_descriptors_page.py index 3f38775..170c97a 100644 --- a/tests/test_catalog_list_entity_descriptors_page.py +++ b/tests/test_catalog_list_entity_descriptors_page.py @@ -1,7 +1,6 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) response = cli(["catalog", "list-descriptors", "-t", "service", "-p", "0", "-z", "1"]) # YAML descriptor has single quotes, so cannot read it as valid JSON. First convert to double quotes. diff --git a/tests/test_catalog_list_entity_descriptors_page_size.py b/tests/test_catalog_list_entity_descriptors_page_size.py index 8a62fea..eedc1b3 100644 --- a/tests/test_catalog_list_entity_descriptors_page_size.py +++ b/tests/test_catalog_list_entity_descriptors_page_size.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - response = cli(["catalog", "list-descriptors", "-t", "service", "-p", "0", "-z", "1"]) assert (len(response['descriptors']) == 1) diff --git a/tests/test_catalog_list_entity_descriptors_yaml.py b/tests/test_catalog_list_entity_descriptors_yaml.py index 1644e6d..383974b 100644 --- a/tests/test_catalog_list_entity_descriptors_yaml.py +++ b/tests/test_catalog_list_entity_descriptors_yaml.py @@ -1,8 +1,6 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - response = cli(["catalog", "list-descriptors", "-y", "--types", "service"]) - list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "test-service"] + list = [descriptor for descriptor in response['descriptors'] if descriptor['info']['x-cortex-tag'] == "cli-test-service"] assert 
list[0]['info']['x-cortex-custom-metadata']['cicd'] == "circle-ci" diff --git a/tests/test_catalog_list_include_archived.py b/tests/test_catalog_list_include_archived.py index 0582ad0..a9b33e4 100644 --- a/tests/test_catalog_list_include_archived.py +++ b/tests/test_catalog_list_include_archived.py @@ -1,11 +1,10 @@ from tests.helpers.utils import * def test(capsys): - response = cli(["catalog", "create", "-f", "data/run-time/archive-entity.yaml"]) - response = cli(["catalog", "archive", "-t", "archive-entity"]) + response = cli(["catalog", "archive", "-t", "cli-test-archive-entity"]) response = cli(["catalog", "list", "-g", "cli-test", "-z", "500"]) - assert not any(entity['tag'] == 'archive-entity' for entity in response['entities']), "Should not find archived entity" + assert not any(entity['tag'] == 'cli-test-archive-entity' for entity in response['entities']), "Should not find archived entity" response = cli(["catalog", "list", "-g", "cli-test", "-a", "-z", "500"]) - assert any(entity['tag'] == 'archive-entity' for entity in response['entities']), "Should find archived entity" + assert any(entity['tag'] == 'cli-test-archive-entity' for entity in response['entities']), "Should find archived entity" diff --git a/tests/test_catalog_list_include_owners.py b/tests/test_catalog_list_include_owners.py index 1050a58..03b8cea 100644 --- a/tests/test_catalog_list_include_owners.py +++ b/tests/test_catalog_list_include_owners.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(capsys): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - response = cli(["catalog", "list", "-g", "cli-test", "-io"]) assert not(response['entities'][0]['owners']['teams'] is None), "Teams array should be returned in result" diff --git a/tests/test_catalog_list_page.py b/tests/test_catalog_list_page.py index 5dbb0bd..b8d29fa 100644 --- a/tests/test_catalog_list_page.py +++ b/tests/test_catalog_list_page.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def 
test(capsys): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - response = cli(["catalog", "list", "-g", "cli-test", "-p", "0"]) assert (len(response['entities']) > 0) diff --git a/tests/test_catalog_list_page_size.py b/tests/test_catalog_list_page_size.py index 664a387..046fd47 100644 --- a/tests/test_catalog_list_page_size.py +++ b/tests/test_catalog_list_page_size.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(): - response = cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - response = cli(["catalog", "list", "-g", "cli-test", "-p", "0", "-z", "1"]) assert (len(response['entities']) == 1) diff --git a/tests/test_catalog_retrieve_entity_descriptor.py b/tests/test_catalog_retrieve_entity_descriptor.py index a7a1bb8..7f687a4 100644 --- a/tests/test_catalog_retrieve_entity_descriptor.py +++ b/tests/test_catalog_retrieve_entity_descriptor.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - - response = cli(["catalog", "descriptor", "-t", "test-service"]) - assert response['info']['x-cortex-tag'] == "test-service" + response = cli(["catalog", "descriptor", "-t", "cli-test-service"]) + assert response['info']['x-cortex-tag'] == "cli-test-service" diff --git a/tests/test_catalog_retrieve_entity_descriptor_yaml.py b/tests/test_catalog_retrieve_entity_descriptor_yaml.py index 3a304fb..2f323d0 100644 --- a/tests/test_catalog_retrieve_entity_descriptor_yaml.py +++ b/tests/test_catalog_retrieve_entity_descriptor_yaml.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - - response = cli(["catalog", "descriptor", "-y", "-t", "test-service"], ReturnType.STDOUT) - assert yaml.safe_load(response)['info']['x-cortex-tag'] == "test-service" + response = cli(["catalog", "descriptor", "-y", "-t", "cli-test-service"], ReturnType.STDOUT) + assert 
yaml.safe_load(response)['info']['x-cortex-tag'] == "cli-test-service" diff --git a/tests/test_catalog_retrieve_entity_details.py b/tests/test_catalog_retrieve_entity_details.py index 6e78de8..4af0be7 100644 --- a/tests/test_catalog_retrieve_entity_details.py +++ b/tests/test_catalog_retrieve_entity_details.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - - response = cli(["catalog", "details", "-t", "test-service"]) - assert response['tag'] == "test-service" + response = cli(["catalog", "details", "-t", "cli-test-service"]) + assert response['tag'] == "cli-test-service" diff --git a/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py b/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py index 9670523..c8e1149 100644 --- a/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py +++ b/tests/test_catalog_retrieve_entity_details_hierarchy_fields.py @@ -1,7 +1,5 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - - response = cli(["catalog", "details", "-t", "test-service", "-i", "groups"]) - assert response['tag'] == "test-service" + response = cli(["catalog", "details", "-t", "cli-test-service", "-i", "groups"]) + assert response['tag'] == "cli-test-service" diff --git a/tests/test_catalog_unarchive_entity.py b/tests/test_catalog_unarchive_entity.py index 2da2cd0..102a506 100644 --- a/tests/test_catalog_unarchive_entity.py +++ b/tests/test_catalog_unarchive_entity.py @@ -1,11 +1,10 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/unarchive-entity.yaml"]) - cli(["catalog", "archive", "-t", "unarchive-entity"]) + cli(["catalog", "archive", "-t", "cli-test-unarchive-entity"]) - response = cli(["catalog", "details", "-t", "unarchive-entity"]) + response = cli(["catalog", "details", "-t", "cli-test-unarchive-entity"]) assert 
response['isArchived'] == True, "isArchived attribute should be true" - response = cli(["catalog", "unarchive", "-t", "unarchive-entity"]) + response = cli(["catalog", "unarchive", "-t", "cli-test-unarchive-entity"]) assert response['isArchived'] == False, "isArchived attribute should not be true" diff --git a/tests/test_config_file.py b/tests/test_config_file.py index 27682a4..6455f58 100644 --- a/tests/test_config_file.py +++ b/tests/test_config_file.py @@ -1,3 +1,4 @@ +from tests.helpers.utils import * """ Tests for the cortex CLI config file """ @@ -9,7 +10,6 @@ # Additionally, order is VERY IMPORTANT in this file because of the way CORTEX_API key is # deleted, set to invalid values, etc. Moving test order could impact the overall success # of pytest. Tread carefully here. -from cortexapps_cli.cortex import cli import io import os @@ -32,66 +32,25 @@ def test_config_file_api_key_quotes(tmp_path): api_key = "${cortex_api_key}" """) content = template.substitute(cortex_api_key=cortex_api_key) - print(content) f.write_text(content) - cli(["-c", str(f), "teams", "list"]) + cli(["-c", str(f), "entity-types", "list"]) @pytest.mark.serial -def test_environment_variables(capsys): - cli(["catalog", "list"]) - out, err = capsys.readouterr() - print("ERR = " + err) - assert err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY", "Warning should be displayed by default" - - cli(["-q", "catalog", "list"]) - out, err = capsys.readouterr() - assert not(err.partition('\n')[0] == "WARNING: tenant setting overidden by CORTEX_API_KEY"), "Warning should be displayed with -q option" - -@pytest.mark.serial -def test_config_file_create(monkeypatch, tmp_path, delete_cortex_api_key): - with pytest.raises(SystemExit) as excinfo: - monkeypatch.setattr('sys.stdin', io.StringIO('Y')) - f = tmp_path / "test-config.txt" - cli(["-c", str(f), "catalog", "list"]) +def test_config_file_create(monkeypatch, tmp_path): + monkeypatch.setattr('sys.stdin', io.StringIO('y')) + 
f = tmp_path / "test-config.txt" + response = cli(["-c", str(f), "-k", os.getenv('CORTEX_API_KEY'), "scorecards", "list"]) + assert any(scorecard['tag'] == 'cli-test-scorecard' for scorecard in response['scorecards']), "Should find scorecard with tag cli-test-scorecard" @pytest.mark.serial -def test_config_file_new(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config" - content = """ - [default] - api_key = REPLACE_WITH_YOUR_CORTEX_API_KEY - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "teams", "list"]) - out, err = capsys.readouterr() - -@pytest.mark.serial -def test_export(capsys, delete_cortex_api_key): - cli(["-t", "rich-sandbox", "backup", "export"]) - out, err = capsys.readouterr() - last_line = out.strip().split("\n")[-1] - sys.stdout.write(out + "\n\n") - sys.stdout.write(last_line + "\n\n") - assert "rich-sandbox" in out - -@pytest.mark.serial -def test_config_file_bad_api_key(tmp_path, capsys, delete_cortex_api_key): - f = tmp_path / "cortex_config_bad_api_key" - content = """ - [default] - api_key = invalidApiKey - """ - f.write_text(content) - with pytest.raises(SystemExit) as excinfo: - cli(["-c", str(f), "teams", "list"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == "Unauthorized", "Invalid api key should show Unauthorized message" +def test_config_file_bad_api_key(monkeypatch, tmp_path, delete_cortex_api_key): + monkeypatch.setattr('sys.stdin', io.StringIO('y')) + f = tmp_path / "test-config-bad-api-key.txt" + response = cli(["-c", str(f), "-k", "invalidApiKey", "scorecards", "list"], return_type=ReturnType.RAW) + assert "401 Client Error: Unauthorized" in str(response), "should get Unauthorized error" @pytest.mark.serial -def test_environment_variable_invalid_key(capsys): - with pytest.raises(SystemExit) as excinfo: - os.environ["CORTEX_API_KEY"] = "invalidKey" - cli(["teams", "list"]) - out, err = capsys.readouterr() - assert err.partition('\n')[0] == 
"Unauthorized", "Invalid api key should show Unauthorized message" +def test_environment_variable_invalid_key(): + os.environ["CORTEX_API_KEY"] = "invalidKey" + response = cli(["scorecards", "list"], return_type=ReturnType.RAW) + assert "401 Client Error: Unauthorized" in str(response), "should get Unauthorized error" diff --git a/tests/test_custom_data_create_or_update_in_bulk.py b/tests/test_custom_data_create_or_update_in_bulk.py index 5867aa4..d033207 100644 --- a/tests/test_custom_data_create_or_update_in_bulk.py +++ b/tests/test_custom_data_create_or_update_in_bulk.py @@ -3,10 +3,10 @@ def test(): cli(["custom-data", "bulk", "-f", "data/run-time/custom-data-bulk.json"]) - result = cli(["catalog", "details", "-t", "test-service-caller"]) + result = cli(["catalog", "details", "-t", "cli-test-service-caller"]) list = [metadata for metadata in result['metadata'] if metadata['key'] == "bulk-key-1"] assert list[0]['value'] == "value-1" - result = cli( ["catalog", "details", "-t", "test-service-callee"]) + result = cli( ["catalog", "details", "-t", "cli-test-service-callee"]) list = [metadata for metadata in result['metadata'] if metadata['key'] == "bulk-key-4"] assert list[0]['value'] == "value-4" diff --git a/tests/test_custom_data_delete.py b/tests/test_custom_data_delete.py index 12bc7c9..a787c13 100644 --- a/tests/test_custom_data_delete.py +++ b/tests/test_custom_data_delete.py @@ -1,14 +1,12 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + cli(["custom-data", "add", "-t", "cli-test-service", "-f", "data/run-time/custom-data-delete.json"]) - cli(["custom-data", "add", "-t", "test-service", "-f", "data/run-time/custom-data-delete.json"]) - - result = cli(["custom-data", "get", "-t", "test-service", "-k", "delete-me"]) + result = cli(["custom-data", "get", "-t", "cli-test-service", "-k", "delete-me"]) assert result['value'] == "yes" - cli(["custom-data", "delete", "-t", "test-service", "-k", 
"delete-me"]) + cli(["custom-data", "delete", "-t", "cli-test-service", "-k", "delete-me"]) - result = cli(["catalog", "details", "-t", "test-service"]) + result = cli(["catalog", "details", "-t", "cli-test-service"]) assert not any(metadata['key'] == 'delete-me' for metadata in result['metadata']) diff --git a/tests/test_custom_data_list.py b/tests/test_custom_data_list.py index 01f97e3..dca0e8d 100644 --- a/tests/test_custom_data_list.py +++ b/tests/test_custom_data_list.py @@ -1,8 +1,6 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - - result = cli(["catalog", "details", "-t", "test-service"]) + result = cli(["catalog", "details", "-t", "cli-test-service"]) list = [metadata for metadata in result['metadata'] if metadata['key'] == "cicd"] assert list[0]['value'] == "circle-ci" diff --git a/tests/test_custom_events_list.py b/tests/test_custom_events_list.py index fff7c50..cc0a555 100644 --- a/tests/test_custom_events_list.py +++ b/tests/test_custom_events_list.py @@ -1,16 +1,14 @@ from tests.helpers.utils import * def test(): + cli(["custom-events", "delete-all", "-t", "cli-test-service", "-y", "VALIDATE_SERVICE"]) + cli(["custom-events", "create", "-t", "cli-test-service", "-f", "data/run-time/custom-events.json"]) - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - cli(["custom-events", "delete-all", "-t", "test-service", "-y", "VALIDATE_SERVICE"]) - cli(["custom-events", "create", "-t", "test-service", "-f", "data/run-time/custom-events.json"]) - - result = cli(["custom-events", "list", "-t", "test-service"]) + result = cli(["custom-events", "list", "-t", "cli-test-service"]) assert result['events'][0]['type'] == "VALIDATE_SERVICE" - result = cli(["custom-events", "list", "-t", "test-service", "-y", "VALIDATE_SERVICE"]) + result = cli(["custom-events", "list", "-t", "cli-test-service", "-y", "VALIDATE_SERVICE"]) assert result['events'][0]['type'] == "VALIDATE_SERVICE" - 
result = cli(["custom-events", "list", "-t", "test-service", "-y", "VALIDATE_SERVICE", "-ts", "2023-10-10T13:27:51"]) + result = cli(["custom-events", "list", "-t", "cli-test-service", "-y", "VALIDATE_SERVICE", "-ts", "2023-10-10T13:27:51"]) assert result['events'][0]['type'] == "VALIDATE_SERVICE" diff --git a/tests/test_custom_events_uuid.py b/tests/test_custom_events_uuid.py index 167eec6..40842d4 100644 --- a/tests/test_custom_events_uuid.py +++ b/tests/test_custom_events_uuid.py @@ -1,26 +1,24 @@ from tests.helpers.utils import * def test(): - result = cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - - result = cli(["custom-events", "create", "-t", "test-service", "-f", "data/run-time/custom-events-configure.json"]) + result = cli(["custom-events", "create", "-t", "cli-test-service", "-f", "data/run-time/custom-events-configure.json"]) uuid = result['uuid'] - result = cli(["custom-events", "get-by-uuid", "-t", "test-service", "-u", uuid]) + result = cli(["custom-events", "get-by-uuid", "-t", "cli-test-service", "-u", uuid]) assert result['type'] == "CONFIG_SERVICE" - cli(["custom-events", "update-by-uuid", "-t", "test-service", "-u", uuid, "-f", "data/run-time/custom-events.json"]) + cli(["custom-events", "update-by-uuid", "-t", "cli-test-service", "-u", uuid, "-f", "data/run-time/custom-events.json"]) - result = cli(["custom-events", "get-by-uuid", "-t", "test-service", "-u", uuid]) + result = cli(["custom-events", "get-by-uuid", "-t", "cli-test-service", "-u", uuid]) assert result['type'] == "VALIDATE_SERVICE" - cli(["custom-events", "delete-by-uuid", "-t", "test-service", "-u", uuid]) + cli(["custom-events", "delete-by-uuid", "-t", "cli-test-service", "-u", uuid]) # Custom event was deleted, so verify it cannot be retrieved. 
# with pytest.raises(SystemExit) as excinfo: - result = cli(["custom-events", "get-by-uuid", "-t", "test-service", "-u", uuid], ReturnType.RAW) + result = cli(["custom-events", "get-by-uuid", "-t", "cli-test-service", "-u", uuid], ReturnType.RAW) out = result.stdout assert "HTTP Error 404: Not Found" in out, "An HTTP 404 error code should be thrown" assert result.exit_code == 1 - cli(["custom-events", "delete-all", "-t", "test-service"]) + cli(["custom-events", "delete-all", "-t", "cli-test-service"]) diff --git a/tests/test_custom_metrics.py b/tests/test_custom_metrics.py index 35bd579..7074b39 100644 --- a/tests/test_custom_metrics.py +++ b/tests/test_custom_metrics.py @@ -2,17 +2,28 @@ # As part of this testing, filed: # CET-19691: custom metrics POST API returns 200 response for un-processed metrics older than 6 months + def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) + result = cli(["custom-metrics", "get", "-t", "cli-test-service", "-k", "vulnerabilities"], ReturnType.STDOUT) + + if "HTTP Error 403: Product access to [ENG_METRICS] not permitted" in result: + print("API key does not have access to custom metrics or feature not enabled in tenant, not running tests.") + return + + # No API support to create a custom metric. It can only be done in the UI, so check if this workspace has the + # 'vulnerabilities' custom metric defined. + result = cli(["custom-metrics", "get", "-t", "cli-test-service", "-k", "vulnerabilities"], ReturnType.STDOUT) + if "HTTP Error 404: Not Found - CustomMetricKey not found" in result: + print("Custom metric named 'vulnerabilities' does not exist. 
It has to be created in the UI for this test to run.") + print("To create: Settings -> Eng Intelligence -> General -> Custom -> Add Metric -> (select API toggle).") + return date = today() - cli(["custom-metrics", "delete", "-t", "test-service", "-k", "vulnerabilities", "-s", "2022-01-01T00:00:00", "-e", today()]) - cli(["custom-metrics", "add", "-t", "test-service", "-k", "vulnerabilities", "-v", "3.0"]) - result = cli(["custom-metrics", "get", "-t", "test-service", "-k", "vulnerabilities"]) + cli(["custom-metrics", "delete", "-t", "cli-test-service", "-k", "vulnerabilities", "-s", "2022-01-01T00:00:00", "-e", today()]) + cli(["custom-metrics", "add", "-t", "cli-test-service", "-k", "vulnerabilities", "-v", "3.0"]) + result = cli(["custom-metrics", "get", "-t", "cli-test-service", "-k", "vulnerabilities"]) assert result['data'][0]['value'] == 3.0, "should have single value of 3.0" - cli(["custom-metrics", "add-in-bulk", "-t", "test-service", "-k", "vulnerabilities", "-v", f"{date}=1.0", "-v", f"{date}=2.0"]) - result = cli(["custom-metrics", "get", "-t", "test-service", "-k", "vulnerabilities"]) + cli(["custom-metrics", "add-in-bulk", "-t", "cli-test-service", "-k", "vulnerabilities", "-v", f"{date}=1.0", "-v", f"{date}=2.0"]) + result = cli(["custom-metrics", "get", "-t", "cli-test-service", "-k", "vulnerabilities"]) assert result['total'] == 3, "should have total of 3 metrics data points" - print("There is not a good way to test this today because there is a pre-requisite that the custom metric already exists.") - print("If you manually create the custom metric named 'vulnerabilities' you can run these tests.") diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py index 9bd3099..a9b0eb7 100644 --- a/tests/test_dependencies.py +++ b/tests/test_dependencies.py @@ -1,11 +1,8 @@ from tests.helpers.utils import * def test(): - callerTag = "test-service-caller" - calleeTag = "test-service-callee" - - cli(["catalog", "create", "-f", 
"data/run-time/test-service-caller.yaml"]) - cli(["catalog", "create", "-f", "data/run-time/test-service-callee.yaml"]) + callerTag = "cli-test-service-caller" + calleeTag = "cli-test-service-callee" cli(["dependencies", "delete-all", "-r", callerTag]) @@ -13,16 +10,16 @@ def test(): cli(["dependencies", "create", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs"]) cli(["dependencies", "update", "-r", callerTag, "-e", calleeTag, "-m", "GET", "-p", "/api/v1/audit-logs", "-f", "data/run-time/dependencies-update.json"]) - result = cli(["dependencies", "get", "-r", "test-service-caller", "-e", "test-service-callee", "-m", "GET", "-p", "/api/v1/github/configurations"]) + result = cli(["dependencies", "get", "-r", "cli-test-service-caller", "-e", "cli-test-service-callee", "-m", "GET", "-p", "/api/v1/github/configurations"]) assert result["callerTag"] == callerTag, "callerTag should be " + callerTag assert result["calleeTag"] == calleeTag, "calleeTag should be " + calleeTag - cli(["dependencies", "get", "-r", "test-service-caller", "-e", "test-service-callee", "-m", "GET", "-p", "/api/v1/github/configurations"]) + cli(["dependencies", "get", "-r", "cli-test-service-caller", "-e", "cli-test-service-callee", "-m", "GET", "-p", "/api/v1/github/configurations"]) - result = cli(["dependencies", "get-all", "-r", "test-service-caller", "-o"]) + result = cli(["dependencies", "get-all", "-r", "cli-test-service-caller", "-o"]) assert any(dependency['callerTag'] == callerTag and dependency['path'] == "/api/v1/github/configurations" for dependency in result["dependencies"]) - cli(["dependencies", "delete", "-r", "test-service-caller", "-e", "test-service-callee", "-m", "GET", "-p", "/api/v1/audit-logs"]) + cli(["dependencies", "delete", "-r", "cli-test-service-caller", "-e", "cli-test-service-callee", "-m", "GET", "-p", "/api/v1/audit-logs"]) cli(["dependencies", "add-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) cli(["dependencies", 
"delete-in-bulk", "-f", "data/run-time/dependencies-bulk.json"]) - cli(["dependencies", "delete-all", "-r", "test-service-caller"]) + cli(["dependencies", "delete-all", "-r", "cli-test-service-caller"]) diff --git a/tests/test_deploys.py b/tests/test_deploys.py index 38d89e7..5583489 100644 --- a/tests/test_deploys.py +++ b/tests/test_deploys.py @@ -1,49 +1,48 @@ from tests.helpers.utils import * def _add_deploy(): - cli(["deploys", "add", "-t", "test-service", "-f", "data/run-time/deploys.json"]) + cli(["deploys", "add", "-t", "cli-test-service", "-f", "data/run-time/deploys.json"]) def _delete_all(): cli(["deploys", "delete-all"]) - result = cli(["deploys", "list", "-t", "test-service"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) assert len(result['deployments']) == 0, "All deployments for entity should be deleted" def test_deploys(): _delete_all() - result = cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - result = cli(["deploys", "add", "-t", "test-service", "-f", "data/run-time/deploys-uuid.json"]) + result = cli(["deploys", "add", "-t", "cli-test-service", "-f", "data/run-time/deploys-uuid.json"]) uuid = result['uuid'] print("uuid = " + uuid) _add_deploy() - result = cli(["deploys", "list", "-t", "test-service"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) assert any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should find a deploy with uuid" assert result['total'] == 2, "Two deploys should be returned for entity" - cli(["deploys", "update-by-uuid", "-t", "test-service", "-u", uuid, "-f", "data/run-time/deploys-update.json"]) - result = cli(["deploys", "list", "-t", "test-service"]) + cli(["deploys", "update-by-uuid", "-t", "cli-test-service", "-u", uuid, "-f", "data/run-time/deploys-update.json"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) deploy = [deploy for deploy in result['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-456789", "Should 
find a deploy with sha" - cli(["deploys", "delete-by-uuid", "-t", "test-service", "-u", uuid]) - result = cli(["deploys", "list", "-t", "test-service"]) + cli(["deploys", "delete-by-uuid", "-t", "cli-test-service", "-u", uuid]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) assert not any(deploy['uuid'] == uuid for deploy in result['deployments']), "Should not find a deploy with uuid" assert result['total'] == 1, "Following delete-by-uuid, only one deploy should be returned for entity" _add_deploy() - cli(["deploys", "delete", "-t", "test-service", "-s", "SHA-123456"]) - result = cli(["deploys", "list", "-t", "test-service"]) + cli(["deploys", "delete", "-t", "cli-test-service", "-s", "SHA-123456"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) assert not any(deploy['sha'] == "SHA-123456" for deploy in result['deployments']), "Should not find a deploy with sha that was deleted" _add_deploy() cli(["deploys", "delete-by-filter", "-ty", "DEPLOY"]) - result = cli(["deploys", "list", "-t", "test-service"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) assert not any(deploy['type'] == "DEPLOY" for deploy in result['deployments']), "Should not find a deploy type 'DEPLOY' that was deleted" result = cli(["deploys", "add", - "-t", "test-service", + "-t", "cli-test-service", "--email", "julien@tpb.com", "--name", "Julien", "--environment", "PYPI.org", @@ -54,7 +53,7 @@ def test_deploys(): "-c", "abc=123", "-c", "def=456"]) uuid = result['uuid'] - result = cli(["deploys", "list", "-t", "test-service"]) + result = cli(["deploys", "list", "-t", "cli-test-service"]) deploy = [deploy for deploy in result['deployments'] if deploy['uuid'] == uuid] assert deploy[0]['sha'] == "SHA-123456", "Should find a deploy with sha" assert deploy[0]['deployer']['email'] == "julien@tpb.com", "Email should be set for deploy" diff --git a/tests/test_entity_types.py b/tests/test_entity_types.py index 713f3a8..8b2d65b 100644 --- 
a/tests/test_entity_types.py +++ b/tests/test_entity_types.py @@ -1,19 +1,17 @@ from tests.helpers.utils import * def test_resource_definitions(capsys): - cli(["entity-types", "create", "-f", "data/run-time/create-entity-type-empty-schema.json"]) - response = cli(["entity-types", "list"]) entity_types = response['definitions'] - assert any(definition['type'] == 'cli-test-empty-schema' for definition in entity_types), "Should find entity type named 'cli-test-empty-schema'" + assert any(definition['type'] == 'cli-test' for definition in entity_types), "Should find entity type named 'cli-test'" - if any(definition['type'] == 'cli-test-empty-schema' for definition in entity_types): - cli(["entity-types", "delete", "-t", "cli-test-empty-schema"]) - cli(["entity-types", "create", "-f", "data/run-time/create-entity-type-empty-schema.json"]) + if any(definition['type'] == 'cli-test' for definition in entity_types): + cli(["entity-types", "delete", "-t", "cli-test"]) + cli(["entity-types", "create", "-f", "data/import/entity-types/cli-test.json"]) response = cli(["entity-types", "list"]) - assert any(definition['type'] == 'cli-test-empty-schema' for definition in response['definitions']), "Should find entity type named 'cli-test-empty-schema'" + assert any(definition['type'] == 'cli-test' for definition in response['definitions']), "Should find entity type named 'cli-test'" - cli(["entity-types", "get", "-t", "cli-test-empty-schema"]) + cli(["entity-types", "get", "-t", "cli-test"]) - cli(["entity-types", "update", "-t", "cli-test-empty-schema", "-f", "data/run-time/update-entity-type-empty-schema.json"]) + cli(["entity-types", "update", "-t", "cli-test", "-f", "data/run-time/entity-type-update.json"]) diff --git a/tests/test_export.py b/tests/test_export.py new file mode 100644 index 0000000..dcc4470 --- /dev/null +++ b/tests/test_export.py @@ -0,0 +1,4 @@ +from tests.helpers.utils import * + +def test(): + response = cli(["backup", "export", "-e", "workflows,scorecards"], 
ReturnType.STDOUT) diff --git a/tests/test_gitops_logs.py b/tests/test_gitops_logs.py index d4aab20..f2f53fc 100644 --- a/tests/test_gitops_logs.py +++ b/tests/test_gitops_logs.py @@ -1,9 +1,13 @@ from tests.helpers.utils import * -# This just ensures getting all logs does not fail. Could probably get rid of this test. def test_gitops_logs_get(): cli(["gitops-logs", "get"]) def test_gitops_logs_page_size(capsys): - response = cli(["gitops-logs", "get", "-p", "1", "-z", "5"]) - assert len(response['logs']) == 5, "Changing page size should return requested amount of entries" + response = cli(["gitops-logs", "get", "-p", "0", "-z", "1"]) + # Only run assert if there is at least one entry in the gitops logs + if response['totalPages'] > 0: + assert len(response['logs']) == 1, "Changing page size should return requested amount of entries" + else: + print("No gitops logs. Not running assertion test.") + diff --git a/tests/test_groups.py b/tests/test_groups.py index b8fda33..cae49c2 100644 --- a/tests/test_groups.py +++ b/tests/test_groups.py @@ -1,10 +1,10 @@ from tests.helpers.utils import * def test_groups(): - cli(["groups", "add", "-t", "test-service", "-g", "test-group-2,test-group-3"]) - response = cli(["groups", "get", "-t", "test-service"]) - assert any(group['tag'] == 'test-group-2' for group in response['groups']), "Should find group named test-group-2 in entity test-service" + cli(["groups", "add", "-t", "cli-test-service", "-g", "test-group-2,test-group-3"]) + response = cli(["groups", "get", "-t", "cli-test-service"]) + assert any(group['tag'] == 'test-group-2' for group in response['groups']), "Should find group named test-group-2 in entity cli-test-service" - cli(["groups", "delete", "-t", "test-service", "-g", "test-group-2,test-group-3"]) - response = cli(["groups", "get", "-t", "test-service"]) - assert not(any(group['tag'] == 'test-group-2' for group in response['groups'])), "After delete, should not find group named test-group-2 in entity 
test-service" + cli(["groups", "delete", "-t", "cli-test-service", "-g", "test-group-2,test-group-3"]) + response = cli(["groups", "get", "-t", "cli-test-service"]) + assert not(any(group['tag'] == 'test-group-2' for group in response['groups'])), "After delete, should not find group named test-group-2 in entity cli-test-service" diff --git a/tests/test_groups_input_file.py b/tests/test_groups_input_file.py index 306e376..687be38 100644 --- a/tests/test_groups_input_file.py +++ b/tests/test_groups_input_file.py @@ -1,16 +1,15 @@ from tests.helpers.utils import * def test(): - cli(["catalog", "create", "-f", "data/run-time/test-service.yaml"]) - cli(["groups", "add", "-t", "test-service", "-f", "tests/test-groups.json"]) + cli(["groups", "add", "-t", "cli-test-service", "-f", "tests/test-groups.json"]) - cli(["groups", "add", "-t", "test-service", "-f", "tests/test-groups.json"]) - response = cli(["groups", "get", "-t", "test-service"]) + cli(["groups", "add", "-t", "cli-test-service", "-f", "tests/test-groups.json"]) + response = cli(["groups", "get", "-t", "cli-test-service"]) assert any(group['tag'] == 'group1' for group in response['groups']), "should find group1 in list of groups" assert any(group['tag'] == 'group2' for group in response['groups']), "should find group2 in list of groups" - cli(["groups", "delete", "-t", "test-service", "-f", "tests/test-groups.json"]) - response = cli(["groups", "get", "-t", "test-service"]) + cli(["groups", "delete", "-t", "cli-test-service", "-f", "tests/test-groups.json"]) + response = cli(["groups", "get", "-t", "cli-test-service"]) assert not(any(group['tag'] == 'group1' for group in response['groups'])), "should not find group1 in list of groups" assert not(any(group['tag'] == 'group2' for group in response['groups'])), "should not find group2 in list of groups" diff --git a/tests/test_import.py b/tests/test_import.py new file mode 100644 index 0000000..1e47836 --- /dev/null +++ b/tests/test_import.py @@ -0,0 +1,6 @@ 
+from tests.helpers.utils import * + +@pytest.mark.setup +def test(): + response = cli(["backup", "import", "-d", "data/import"], return_type=ReturnType.STDOUT) + print(response) diff --git a/tests/test_ip_allowlist.py b/tests/test_ip_allowlist.py index ea66382..7004cca 100644 --- a/tests/test_ip_allowlist.py +++ b/tests/test_ip_allowlist.py @@ -2,7 +2,7 @@ import requests def test(capsys, tmp_path): - ip_address = requests.get("https://ip.me").text.strip() + ip_address = "0.0.0.0/0" ip_param = ip_address + ":My current IP" cli(["ip-allowlist", "validate", "-a", ip_param]) cli(["ip-allowlist", "replace", "-a", ip_param]) diff --git a/tests/test_packages.py b/tests/test_packages.py index 4a3b829..876f28e 100644 --- a/tests/test_packages.py +++ b/tests/test_packages.py @@ -1,37 +1,37 @@ from tests.helpers.utils import * def test_packages(): - cli(["packages", "go", "upload", "-t", "test-service", "-f", "tests/test_packages_go.sum"]) + cli(["packages", "go", "upload", "-t", "cli-test-service", "-f", "tests/test_packages_go.sum"]) - cli(["packages", "java", "upload-single", "-t", "test-service", "-f", "tests/test_packages_java_single.json"]) + cli(["packages", "java", "upload-single", "-t", "cli-test-service", "-f", "tests/test_packages_java_single.json"]) - cli(["packages", "java", "upload-multiple", "-t", "test-service", "-f", "tests/test_packages_java_multiple.json"]) + cli(["packages", "java", "upload-multiple", "-t", "cli-test-service", "-f", "tests/test_packages_java_multiple.json"]) # upload-pipfile will replace any existing PYTHON package entries for an entity. It's assumed you will use either # pipfile.lock or requirements.txt, but not both. # So we need to test here because these packages will be overwritten by the upload-requirements command. 
- cli(["packages", "python", "upload-pipfile", "-t", "test-service", "-f", "tests/test_packages_python_pipfile.lock"]) - response = cli(["packages", "list", "-t", "test-service"]) + cli(["packages", "python", "upload-pipfile", "-t", "cli-test-service", "-f", "tests/test_packages_python_pipfile.lock"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) assert any(package['name'] == 'certifi' and package['packageType'] == "PYTHON" for package in response), "Should find Python pipfile package" - cli(["packages", "python", "upload-requirements", "-t", "test-service", "-f", "tests/test_packages_python_requirements.txt"]) + cli(["packages", "python", "upload-requirements", "-t", "cli-test-service", "-f", "tests/test_packages_python_requirements.txt"]) # Similar store for Node as Python. Only one file type is supported. - cli(["packages", "node", "upload-package-json", "-t", "test-service", "-f", "tests/test_packages_node_package.json"]) - response = cli(["packages", "list", "-t", "test-service"]) + cli(["packages", "node", "upload-package-json", "-t", "cli-test-service", "-f", "tests/test_packages_node_package.json"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) assert any(package['name'] == 'clean-css' and package['packageType'] == "NODE" for package in response), "Should find Node package.json package" - cli(["packages", "node", "upload-package-lock", "-t", "test-service", "-f", "tests/test_packages_node_package_lock.json"]) - response = cli(["packages", "list", "-t", "test-service"]) + cli(["packages", "node", "upload-package-lock", "-t", "cli-test-service", "-f", "tests/test_packages_node_package_lock.json"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) assert any(package['name'] == '@angular/common' and package['packageType'] == "NODE" for package in response), "Should find Node package.lock package" - cli(["packages", "node", "upload-yarn-lock", "-t", "test-service", "-f", "tests/test_packages_node_yarn.lock"]) 
+ cli(["packages", "node", "upload-yarn-lock", "-t", "cli-test-service", "-f", "tests/test_packages_node_yarn.lock"]) - cli(["packages", "nuget", "upload-packages-lock", "-t", "test-service", "-f", "tests/test_packages_nuget_packages_lock.json"]) + cli(["packages", "nuget", "upload-packages-lock", "-t", "cli-test-service", "-f", "tests/test_packages_nuget_packages_lock.json"]) - cli(["packages", "nuget", "upload-csproj", "-t", "test-service", "-f", "tests/test_packages_nuget.csproj"]) + cli(["packages", "nuget", "upload-csproj", "-t", "cli-test-service", "-f", "tests/test_packages_nuget.csproj"]) - response = cli(["packages", "list", "-t", "test-service"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) assert any(package['name'] == 'github.com/cortex.io/catalog' and package['packageType'] == "GO" for package in response), "Should find GO package" assert any(package['name'] == 'io.cortex.scorecards' and package['packageType'] == "JAVA" for package in response), "Should find single-updated Java package" assert any(package['name'] == 'io.cortex.teams' and package['packageType'] == "JAVA" for package in response), "Should find multiple-update Java package" @@ -39,18 +39,18 @@ def test_packages(): assert any(package['name'] == '@types/babylon' and package['packageType'] == "NODE" for package in response), "Should find Node yarn.lock package" assert any(package['name'] == 'MicroBuild.Core' and package['packageType'] == "NUGET" for package in response), "Should find NuGet package" - cli(["packages", "go", "delete", "-t", "test-service", "-n", "github.com/cortex.io/catalog"]) + cli(["packages", "go", "delete", "-t", "cli-test-service", "-n", "github.com/cortex.io/catalog"]) - cli(["packages", "java", "delete", "-t", "test-service", "-n", "io.cortex.scorecards"]) - cli(["packages", "java", "delete", "-t", "test-service", "-n", "io.cortex.teams"]) + cli(["packages", "java", "delete", "-t", "cli-test-service", "-n", "io.cortex.scorecards"]) + 
cli(["packages", "java", "delete", "-t", "cli-test-service", "-n", "io.cortex.teams"]) - cli(["packages", "python", "delete", "-t", "test-service", "-n", "cycler"]) + cli(["packages", "python", "delete", "-t", "cli-test-service", "-n", "cycler"]) - cli(["packages", "node", "delete", "-t", "test-service", "-n", "@types/babylon"]) + cli(["packages", "node", "delete", "-t", "cli-test-service", "-n", "@types/babylon"]) - cli(["packages", "nuget", "delete", "-t", "test-service", "-n", "MicroBuild.Core"]) + cli(["packages", "nuget", "delete", "-t", "cli-test-service", "-n", "MicroBuild.Core"]) - response = cli(["packages", "list", "-t", "test-service"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) assert not any(package['name'] == 'github.com/cortex.io/catalog' and package['packageType'] == "GO" for package in response), "Should not find deleted GO package" @@ -63,6 +63,6 @@ def test_packages(): assert not any(package['name'] == 'MicroBuild.Core' and package['packageType'] == "NUGET" for package in response), "Should not find deleted NuGet package" - cli(["packages", "delete-all", "-t", "test-service"]) - response = cli(["packages", "list", "-t", "test-service"]) + cli(["packages", "delete-all", "-t", "cli-test-service"]) + response = cli(["packages", "list", "-t", "cli-test-service"]) assert len(response) == 0, "Should not find any packages after delete-all" diff --git a/tests/test_plugins.py b/tests/test_plugins.py index affccd7..c77e6fe 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -3,17 +3,17 @@ def test(): response = cli(["plugins", "list"]) - if any(plugin['tag'] == 'my-test-plugin' for plugin in response['plugins']): - cli(["plugins", "delete", "-t", "my-test-plugin"]) + if any(plugin['tag'] == 'cli-test-plugin' for plugin in response['plugins']): + cli(["plugins", "delete", "-t", "cli-test-plugin"]) - cli(["plugins", "create", "-f", "tests/test_plugins.json"]) + cli(["plugins", "create", "-f", 
"data/import/plugins/cli-test-plugin.json"]) response = cli(["plugins", "list"]) - assert any(plugin['tag'] == 'my-test-plugin' for plugin in response['plugins']), "Plugin named my-test-plugin should be in list of plugins" + assert any(plugin['tag'] == 'cli-test-plugin' for plugin in response['plugins']), "Plugin named cli-test-plugin should be in list of plugins" - cli(["plugins", "replace", "-t", "my-test-plugin", "-f", "tests/test_plugins_update.json"]) - response = cli(["plugins", "get", "-t", "my-test-plugin"]) - assert response['tag'] == "my-test-plugin", "Plugin named my-test-plugin should be returned by get" + cli(["plugins", "replace", "-t", "cli-test-plugin", "-f", "tests/test_plugins_update.json"]) + response = cli(["plugins", "get", "-t", "cli-test-plugin"]) + assert response['tag'] == "cli-test-plugin", "Plugin named cli-test-plugin should be returned by get" - cli(["plugins", "delete", "-t", "my-test-plugin"]) + cli(["plugins", "delete", "-t", "cli-test-plugin"]) response = cli(["plugins", "list"]) - assert not(any(plugin['tag'] == 'my-test-plugin' for plugin in response['plugins'])), "Plugin named my-test-plugin should have been deleted" + assert not(any(plugin['tag'] == 'cli-test-plugin' for plugin in response['plugins'])), "Plugin named cli-test-plugin should have been deleted" diff --git a/tests/test_scim.py b/tests/test_scim.py index 46b4c0f..3215747 100644 --- a/tests/test_scim.py +++ b/tests/test_scim.py @@ -1,20 +1,34 @@ from tests.helpers.utils import * +from urllib.error import HTTPError +import pytest -# These tests work against jeff-sandbox tenant. -# Need a plan to figure out how we can ensure we have SCIM enabled in an environment to not skip these tests. 
def test(): - response = cli(["scim", "list"]) - assert any(user['userName'] == 'jeff.schnitter@proton.me' for user in response['Resources']), "Should find user jeff.schnitter@proton.me" + response = cli(["scim", "list"], ReturnType.STDOUT) - response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me"]) - assert response['Resources'][0]['userName'] == 'jeff.schnitter@proton.me', "Should find user jeff.schnitter@proton.me" - id = response['Resources'][0]['id'] + if "HTTP Error 403" in response: + print("SCIM not set up or API key does not have permissions, not running test.") + print("API should probably return something other than a 403 when SCIM isn't set up") + print("because it's not possible to determine if this is a setup or permissions issue.") + else: + response = cli(["scim", "list"]) + total_results = response['totalResults'] + assert total_results >= 0, "Total results should be returned as an integer" + print("total results = " + str(total_results)) - response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me", "-a", "name.familyName"]) - assert 'familyName' in response['Resources'][0]['name'].keys(), "Should find familyName in response" + if total_results > 0: + assert any(user['userName'] == 'jeff.schnitter@proton.me' for user in response['Resources']), "Should find user jeff.schnitter@proton.me" - response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me", "-e", "name.familyName"]) - assert 'familyName' not in response['Resources'][0]['name'].keys(), "Should not have familyName in response" + response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me"]) + assert response['Resources'][0]['userName'] == 'jeff.schnitter@proton.me', "Should find user jeff.schnitter@proton.me" + id = response['Resources'][0]['id'] - response = cli(["scim", "get", "--id", id]) - assert response['id'] == id, "Should find matching id based on query" + response = cli(["scim", "list", 
"--filter", "userName eq jeff.schnitter@proton.me", "-a", "name.familyName"]) + assert 'familyName' in response['Resources'][0]['name'].keys(), "Should find familyName in response" + + response = cli(["scim", "list", "--filter", "userName eq jeff.schnitter@proton.me", "-e", "name.familyName"]) + assert 'familyName' not in response['Resources'][0]['name'].keys(), "Should not have familyName in response" + + response = cli(["scim", "get", "--id", id]) + assert response['id'] == id, "Should find matching id based on query" + else: + print("Not running any scim tests, which is lucky because I have not thought of a good way to make these tests generic.") diff --git a/tests/test_scorecards.py b/tests/test_scorecards.py index 9431b36..9ae94e3 100644 --- a/tests/test_scorecards.py +++ b/tests/test_scorecards.py @@ -5,46 +5,47 @@ # TODO: check for and revoke any PENDING exemptions. @mock.patch.dict(os.environ, {"CORTEX_API_KEY": os.environ['CORTEX_API_KEY']}) def _get_rule(title): - response = cli(["scorecards", "get", "-s", "test-scorecard"]) + response = cli(["scorecards", "get", "-s", "cli-test-scorecard"]) rule_id = [rule['identifier'] for rule in response['scorecard']['rules'] if rule['title'] == title] return rule_id[0] def test_scorecards(): - cli(["scorecards", "create", "-f", "tests/test_scorecards.yaml"]) + cli(["scorecards", "create", "-f", "data/import/scorecards/cli-test-scorecard.yaml"]) response = cli(["scorecards", "list"]) - assert any(scorecard['tag'] == 'test-scorecard' for scorecard in response['scorecards']), "Should find scorecard with tag test-scorecard" + assert any(scorecard['tag'] == 'cli-test-scorecard' for scorecard in response['scorecards']), "Should find scorecard with tag cli-test-scorecard" - response = cli(["scorecards", "shield", "-s", "test-scorecard", "-t", "test-service"]) + response = cli(["scorecards", "shield", "-s", "cli-test-scorecard", "-t", "cli-test-service"]) assert "img.shields.io" in response['value'], "shields url should be 
included in string" - response = cli(["scorecards", "get", "-s", "test-scorecard"]) - assert response['scorecard']['tag'] == "test-scorecard", "JSON response should have scorecard tag" + response = cli(["scorecards", "get", "-s", "cli-test-scorecard"]) + assert response['scorecard']['tag'] == "cli-test-scorecard", "JSON response should have scorecard tag" - response = cli(["scorecards", "descriptor", "-s", "test-scorecard"], return_type=ReturnType.STDOUT) + response = cli(["scorecards", "descriptor", "-s", "cli-test-scorecard"], return_type=ReturnType.STDOUT) assert "Used to test Cortex CLI" in response, "description of scorecard found in descriptor" # cannot rely on a scorecard evaluation being complete, so not performing any validation - cli(["scorecards", "next-steps", "-s", "test-scorecard", "-t", "test-service"]) + cli(["scorecards", "next-steps", "-s", "cli-test-scorecard", "-t", "cli-test-service"]) - response = cli(["scorecards", "scores", "-s", "test-scorecard", "-t", "test-service"]) - assert response['scorecardTag'] == "test-scorecard", "Should get valid response that include test-scorecard" + # cannot rely on a scorecard evaluation being complete, so not performing any validation + #response = cli(["scorecards", "scores", "-s", "cli-test-scorecard", "-t", "cli-test-service"]) + #assert response['scorecardTag'] == "cli-test-scorecard", "Should get valid response that include cli-test-scorecard" # # Not sure if we can run this cli right away. Newly-created Scorecard might not be evaluated yet. 
# # 2024-05-06, additionally now blocked by CET-8882 -# # cli(["scorecards", "scores", "-t", "test-scorecard", "-e", "test-service"]) +# # cli(["scorecards", "scores", "-t", "cli-test-scorecard", "-e", "cli-test-service"]) # -# cli(["scorecards", "scores", "-t", "test-scorecard"]) +# cli(["scorecards", "scores", "-t", "cli-test-scorecard"]) def test_scorecards_drafts(): - cli(["scorecards", "create", "-f", "tests/test_scorecards_draft.yaml"]) + cli(["scorecards", "create", "-f", "data/import/scorecards/cli-test-draft-scorecard.yaml"]) response = cli(["scorecards", "list", "-s"]) - assert any(scorecard['tag'] == 'test-scorecard-draft' for scorecard in response['scorecards']) + assert any(scorecard['tag'] == 'cli-test-draft-scorecard' for scorecard in response['scorecards']) - cli(["scorecards", "delete", "-s", "test-scorecard-draft"]) + cli(["scorecards", "delete", "-s", "cli-test-draft-scorecard"]) response = cli(["scorecards", "list", "-s"]) - assert not(any(scorecard['tag'] == 'test-scorecard-draft' for scorecard in response['scorecards'])), "should not find deleted scorecard" + assert not(any(scorecard['tag'] == 'cli-test-draft-scorecard' for scorecard in response['scorecards'])), "should not find deleted scorecard" # Challenges with testing exemptions: # @@ -66,7 +67,7 @@ def test_scorecards_drafts(): # subsequent test using an ADMIN role to act on the exemption # # So this is how we'll roll for now . . . -# - Automated tests currently run in known tenants that have the 'test-scorecard' in an evaluated state. +# - Automated tests currently run in known tenants that have the 'cli-test-scorecard' in an evaluated state. # - So we can semi-reliably count on an evaluated scorecard to exist. 
@pytest.fixture(scope='session') @@ -75,7 +76,7 @@ def test_exemption_that_will_be_approved(): rule_id = _get_rule("Has Custom Data") print("rule_id = " + rule_id) - response = cli(["scorecards", "exemptions", "request", "-s", "test-scorecard", "-t", "test-service", "-r", "test approve", "-ri", rule_id, "-d", "100"]) + response = cli(["scorecards", "exemptions", "request", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-r", "test approve", "-ri", rule_id, "-d", "100"]) assert response['exemptionStatus']['status'] == 'PENDING', "exemption state should be PENDING" @pytest.mark.usefixtures('test_exemption_that_will_be_approved') @@ -83,9 +84,9 @@ def test_approve_exemption(): rule_id = _get_rule("Has Custom Data") print("rule_id = " + rule_id) - response = cli(["scorecards", "exemptions", "approve", "-s", "test-scorecard", "-t", "test-service", "-ri", rule_id]) + response = cli(["scorecards", "exemptions", "approve", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-ri", rule_id]) assert response['exemptions'][0]['exemptionStatus']['status'] == 'APPROVED', "exemption state should be APPROVED" - response = cli(["scorecards", "exemptions", "revoke", "-s", "test-scorecard", "-t", "test-service", "-r", "I revoke you", "-ri", rule_id]) + response = cli(["scorecards", "exemptions", "revoke", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-r", "I revoke you", "-ri", rule_id]) assert response['exemptions'][0]['exemptionStatus']['status'] == 'REJECTED', "exemption state should be REJECTED" @pytest.fixture(scope='session') @@ -94,7 +95,7 @@ def test_exemption_that_will_be_denied(): rule_id = _get_rule("Is Definitely False") print("rule_id = " + rule_id) - response = cli(["scorecards", "exemptions", "request", "-s", "test-scorecard", "-t", "test-service", "-r", "test deny", "-ri", rule_id, "-d", "100"]) + response = cli(["scorecards", "exemptions", "request", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-r", "test deny", "-ri", rule_id, "-d", 
"100"]) assert response['exemptionStatus']['status'] == 'PENDING', "exemption state should be PENDING" @pytest.mark.usefixtures('test_exemption_that_will_be_denied') @@ -102,5 +103,5 @@ def test_deny_exemption(): rule_id = _get_rule("Is Definitely False") print("rule_id = " + rule_id) - response = cli(["scorecards", "exemptions", "deny", "-s", "test-scorecard", "-t", "test-service", "-r", "I deny, therefore I am", "-ri", rule_id]) + response = cli(["scorecards", "exemptions", "deny", "-s", "cli-test-scorecard", "-t", "cli-test-service", "-r", "I deny, therefore I am", "-ri", rule_id]) assert response['exemptions'][0]['exemptionStatus']['status'] == 'REJECTED', "exemption state should be REJECTED" diff --git a/tests/test_stdin.py b/tests/test_stdin.py index d1d0e03..82857a3 100644 --- a/tests/test_stdin.py +++ b/tests/test_stdin.py @@ -4,8 +4,8 @@ import subprocess def test_stdin_input(capsys): - cat_process = subprocess.Popen(['cat', 'data/run-time/create-entity.yaml'], stdout=subprocess.PIPE) - cortex_process = subprocess.Popen(['cortexapps_cli/cortex.py', 'catalog', 'create','-f-'],stdin=cat_process.stdout, stdout=subprocess.PIPE) + cat_process = subprocess.Popen(['cat', 'data/import/catalog/cli-test-create-entity.yaml'], stdout=subprocess.PIPE) + cortex_process = subprocess.Popen(['cortexapps_cli/cli.py', 'catalog', 'create','-f-'],stdin=cat_process.stdout, stdout=subprocess.PIPE) out, err = cortex_process.communicate() rc=cortex_process.wait() assert rc == 0, "catalog test with stdin should succeed" diff --git a/tests/test_workflows.py b/tests/test_workflows.py index ccd2c5a..5c046fc 100644 --- a/tests/test_workflows.py +++ b/tests/test_workflows.py @@ -2,11 +2,11 @@ import yaml def test(): - cli(["workflows", "create", "-f", "data/run-time/test-workflows.json"]) + cli(["workflows", "create", "-f", "data/import/workflows/cli-test-workflow.yaml"]) response = cli(["workflows", "list"]) - assert any(workflow['tag'] == 'hello-world' for workflow in 
response['workflows']), "Should find workflow with tag hello-world" + assert any(workflow['tag'] == 'cli-test-workflow' for workflow in response['workflows']), "Should find workflow with tag cli-test-workflow" - response = cli(["workflows", "get", "-t", "hello-world"]) + response = cli(["workflows", "get", "-t", "cli-test-workflow"]) - response = cli(["workflows", "delete", "-t", "hello-world"]) + response = cli(["workflows", "delete", "-t", "cli-test-workflow"]) From c9d1b39e063b3a72fc4006a414d37ac5ddb37846 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Wed, 11 Jun 2025 15:41:49 -0700 Subject: [PATCH 51/56] Fix syntax error --- cortexapps_cli/commands/initiatives.py | 61 -------------------------- 1 file changed, 61 deletions(-) diff --git a/cortexapps_cli/commands/initiatives.py b/cortexapps_cli/commands/initiatives.py index 6770a76..b3e33bd 100644 --- a/cortexapps_cli/commands/initiatives.py +++ b/cortexapps_cli/commands/initiatives.py @@ -20,67 +20,6 @@ def create( Create an Initiative. API key must have the Edit Initiative permission. 
""" -description -levels -rules -exclude-groups -include-groups -query -types-include -types-exclude -isdraft -name -notification-disabled -notification-replyto -notification-time-interval -notification-time-unit -scorecard-tag -target-date -{ - "description": "text", - "emphasizedLevels": [ - { - "rank": 1 - } - ], - "emphasizedRules": [ - { - "expression": "text" - } - ], - "filter": { - "groups": { - "exclude": [ - "text" - ], - "include": [ - "text" - ] - }, - "query": "text", - "types": { - "exclude": [ - "text" - ], - "include": [ - "text" - ] - } - }, - "isDraft": true, - "name": "text", - "notificationSchedule": { - "isDisabled": true, - "replyToEmails": [ - "text" - ], - "timeInterval": 1, - "timeUnit": "text" - }, - "scorecardTag": "text", - "targetDate": "2025-06-10" -} - client = ctx.obj["client"] params = { From ac26f9cd01abc9d9ee72c03d2aabd1d316db1aec Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 13 Jun 2025 08:43:58 -0700 Subject: [PATCH 52/56] Minor updates --- README.rst | 257 +++---------------------- cortexapps_cli/commands/backup.py | 35 +++- cortexapps_cli/commands/initiatives.py | 23 ++- cortexapps_cli/commands/plugins.py | 1 - 4 files changed, 73 insertions(+), 243 deletions(-) diff --git a/README.rst b/README.rst index 298df13..51b6154 100644 --- a/README.rst +++ b/README.rst @@ -58,7 +58,7 @@ Config file ---------------------- The CLI requires an API key for all operations. This key is stored in a config file whose default location is `~/.cortex/config`. -This path can be overridden with the `-c` flag. +This path can be overridden with the `-c` flag. You will be prompted to create the file if it does not exist. Minimal contents of the file: @@ -111,74 +111,10 @@ Example: Commands ---------------------- -Run :code:`cortex -h` to see a list of all commands: - -.. 
code-block: - - usage: cortex CLI [-h] [-a] [-c CONFIG] [-d] [-n] [-t] [-v] - {audit-logs,backup,catalog,custom-data,custom-events,dependencies,deploys,discovery-audit,docs,groups,integrations,ip-allowlist,on-call,packages,plugins,queries,resource-definitions,scorecards,teams-hierarchies,teams} - ... - - Cortex command line interface - - positional arguments: - {audit-logs,backup,catalog,custom-data,custom-events,dependencies,deploys,discovery-audit,docs,groups,integrations,ip-allowlist,on-call,packages,plugins,queries,resource-definitions,scorecards,teams-hierarchies,teams} - sub-command help - audit-logs audit log commands - backup import/export commands - catalog catalog commands - custom-data custom_data actions - custom-events custom events actions - dependencies dependencies commands - deploys deploys commands - discovery-audit Discovery Audit commands - docs OpenAPI doc commands - groups groups commands - integrations integrations sub-commands - ip-allowlist IP Allowlist information - on-call get on-call information - packages commands to create and modify packages - plugins commands to create and access plugins - queries run CQL queries - resource-definitions - resource definitions - scorecards scorecards API requests - teams-hierarchies commands to create and modify team hierarchies - teams commands to create and modify teams - - options: - -h, --help show this help message and exit - -a , --cliAlias get CLI parms from [TENANT.aliases] in config file - -c CONFIG, --config CONFIG - Config location, default = ~/.cortex/config - -d, --debug Writes request debug information as JSON to stderr - -n, --noObfuscate Do not obfuscate bearer token when debugging - -t , --tenant tenant name defined in ~/.cortex/config, defaults to 'default' - -v, --version show program's version number and exit - - Type 'man cortex' for additional details. - +Run :code:`cortex` to see a list of options and sub-commands. 
Run :code:`cortex -h` to see a list of all commands for each subcommand. -For example: - -.. code:: bash - - cortex audit-logs -h - -.. code-block:: - - usage: cortex CLI audit-logs [-h] {get} ... - - positional arguments: - {get} audit logs help - get retrieve audit logs - - options: - -h, --help show this help message and exit - - =================== Examples =================== @@ -211,21 +147,23 @@ Your cortex config file will require api keys for both tenants. It would look l .. code-block:: - Getting resource definitions - --> my-resource-1 - Getting catalog entities + Getting catalog --> my-domain-1 --> my-service-1 --> my-service-2 - Getting IP Allowlist definitions + Getting entity-types + --> my-entity-type-1 + Getting ip-allowlist + --> ip-allowlist + Getting plugins + --> my-plugin-1 Getting scorecards --> my-scorecard-1 - Getting teams - --> my-team-1 - --> my-team-2 + Getting workflows + --> my-workflow-1 Export complete! - Contents available in /Users/myUser/.cortex/export/2023-11-19-14-58-14 + Contents available in /Users/myUser/.cortex/export/2025-06-12-14-58-14 **Import** @@ -239,139 +177,23 @@ are automatically imported by Cortex. Cortex does not have access to any keys, integration configurations. ---------------------------------------------------------- -Export all services from one tenant; import into another ---------------------------------------------------------- - -This example shows how to export services from a tenant named :code:`myTenant-dev` and import those services into a tenant -named :code:`myTenant`. It is similar to the full export example "`Export from one tenant; import into another`_", but only -exports/imports services. - -Your cortex config file will require api keys for both tenants. It would look like this: - -.. 
code-block:: - - [myTenant] - api_key = - - [myTenant-dev] - api_key = - - -**Option 1: export service YAMLs to a directory and then import them** - -This option is helpful in case you want to save the entity YAML files. It makes it easy to restart or retry an import -because you will have all YAMLs saved on disk. - -**Export** - -.. code:: bash - - mkdir -p /tmp/cortex-export - cd /tmp/cortex-export - for service in `cortex -t myTenant catalog list -t service | jq -r ".entities[].tag" | sort` - do - cortex -t myTenant catalog descriptor -y -t ${service} > ${service}.yaml - done - -**Import** - -.. code:: bash - - cd /tmp/cortex-export - for file in `ls -1 *.yaml` - do - cortex -t myTenant-dev catalog create -f ${file} - done - -**Option 2: combine the export and import in a single command** - -This option is simpler and doesn't require any disk operations. However, if it fails for any reason you have to run the -entire export/import in its entirety. - -.. code:: bash - - for service in `cortex -t myTenant catalog list -t service | jq -r ".entities[].tag" | sort` - do - echo "Processing service: ${service}" - cortex -t myTenant catalog descriptor -y -t ${service} | cortex -t myTenant-dev catalog create -f- - done - ---------------------------------------------------------- -Export all domains from one tenant; import into another ---------------------------------------------------------- - -This example shows how to export domains from a tenant named :code:`myTenant-dev` and import those domains into a tenant -named :code:`myTenant`. It is similar to the full export example "`Export from one tenant; import into another`_", but only -exports/imports domains. - -Your cortex config file will require api keys for both tenants. It would look like this: - -.. 
code-block:: - - [myTenant] - api_key = - - [myTenant-dev] - api_key = - - -**Option 1: export domain YAMLs to a directory and then import them** - -This option is helpful in case you want to save the entity YAML files. It makes it easy to restart or retry an import -because you will have all YAMLs saved on disk. - -**Export** - -.. code:: bash - - mkdir -p /tmp/cortex-export - cd /tmp/cortex-export - for domain in `cortex -t myTenant catalog list -t domain | jq -r ".entities[].tag" | sort` - do - echo "creating ${domain}.yaml" - cortex -t myTenant catalog descriptor -y -t ${domain} > ${domain}.yaml - done - -**Import** - -.. code:: bash - - cd /tmp/cortex-export - for file in `ls -1 *.yaml` - do - cortex -t myTenant-dev catalog create -f ${file} - done - -**Option 2: combine the export and import in a single command** - -This option is simpler and doesn't require any disk operations. However, if it fails for any reason you have to run the -entire export/import in its entirety. - -.. code:: bash - - for domain in `cortex -t myTenant catalog list -t domain | jq -r ".entities[].tag" | sort` - do - echo "Processing domain: ${domain}" - cortex -t myTenant catalog descriptor -y -t ${domain} | cortex -t myTenant-dev catalog create -f- - done - - ------------------------ Iterate over all domains ------------------------ .. code:: bash - for domain in `cortex catalog list -t domain | jq -r ".entities[].tag" | sort`; do echo "domain = $domain"; done + for domain in `cortex catalog list -t domain --csv -C tag --sort tag:asc`; do echo "domain = $domain"; done ---------------------- Iterate over all teams ---------------------- +**NOTE:** as of June 2025, requires a feature flag enabled to return team entities in the catalog API. Work with your CSM if you need assistance. + .. 
code:: bash - for team in `cortex catalog list -t team | jq -r ".entities[].tag" | sort`; do echo "team = $team"; done + for team in `cortex catalog list -t team --csv -C tag --sort tag:asc`; do echo "team = $team"; done ------------------------- Iterate over all services @@ -379,7 +201,7 @@ Iterate over all services .. code:: bash - for service in `cortex catalog list -t service | jq -r ".entities[].tag" | sort`; do echo "service = $service"; done + for service in `cortex catalog list -t service --csv -C tag --sort tag:asc`; do echo "service = $service"; done ----------------------------- Get git details for a service @@ -404,7 +226,7 @@ Add a suffix to all x-cortex-tag values for services .. code:: bash - for service in `cortex catalog list -t service | jq -r ".entities[].tag" | sort`; do + for service in `cortex catalog list -t service --csv -C tag --sort tag:asc`; do cortex catalog descriptor -y -t ${service} | yq '.info.x-cortex-tag |= . + "-suffix"' | cortex catalog create -f- done @@ -435,7 +257,7 @@ Remove a group from domains .. code:: bash - for domain in `cortex catalog list -t domain -g my-old-group | jq -r ".entities[].tag" | sort`; do + for domain in `cortex catalog list -t domain --csv -C tag --sort tag:asc`; do cortex catalog descriptor -y -t ${domain} | yq -e '.info.x-cortex-groups -= [ "my-old-group" ]' | cortex catalog create -f- done @@ -461,7 +283,7 @@ Modify all github basepath values for domain entitities, changing '-' to '_' .. code:: bash - for domain in `cortex catalog list -t domain | jq -r ".entities[].tag"`; do + for domain in `cortex catalog list -t domain --csv -C tag --sort tag:asc`; do cortex catalog descriptor -y -t ${domain} | yq ".info.x-cortex-git.github.basepath |= sub(\"-\", \"_\")" | cortex catalog create -f- done @@ -488,7 +310,7 @@ Create a backup of all scorecards .. 
code:: bash - for tag in `cortex scorecards list | jq -r ".scorecards[].tag"` + for tag in `cortex scorecards list --csv -C tag` do echo "backing up: ${tag}" cortex scorecards descriptor -t ${tag} > ${tag}.yaml @@ -503,7 +325,7 @@ and it appends " Draft" to the end of the existing title. .. code:: bash - for tag in `cortex scorecards list | jq -r ".scorecards[].tag"` + for tag in `cortex scorecards list --csv -C tag` do cortex scorecards descriptor -t ${tag} | yq '.draft = true | .tag += "-draft" | .name += " Draft"' | cortex scorecards create -f- done @@ -517,7 +339,7 @@ which the drafts were created and delete the drafts. .. code:: bash - for tag in `cortex scorecards list -s | jq -r ".scorecards[].tag" | grep "\-draft$"` + for tag in `cortex scorecards list --csv -C tag --filter tag=.*-draft` do cortex scorecards descriptor -t ${tag} | yq '.draft = false | .tag |= sub("-draft","") | .name |= sub(" Draft", "")' | cortex scorecards create -f- && cortex scorecards delete -t ${tag} done @@ -530,7 +352,7 @@ This recipe is similar to the one above, but it does not create a new scorecard .. code:: bash - for tag in `cortex scorecards list -s | jq -r ".scorecards[].tag" | grep "\-draft$"` + for tag in `cortex scorecards list --csv -C tag --filter tag=.*-draft` do cortex scorecards descriptor -t ${tag} | yq '.draft = false | .tag |= sub("-draft","") | .name |= sub(" Draft", "")' > ${tag}.yaml done @@ -586,37 +408,6 @@ Run this command for two different scorecards and diff the csv files to compare sdiff -s /tmp/scorecard1.csv /tmp/scorecard2.csv ------------------------------------------------------------------------------ -Backup all Workday teams ------------------------------------------------------------------------------ - -This recipe is helpful if you change your Workday report and want to save your existing teams in case you want to restore them. 
- -For each team it will create two files: -- a JSON file that contains the Workday data -- a Cortex team YAML file that refers to the Workday team - -.. code:: bash - - for team in `cortex teams list | jq -r '.teams[] | select (.type == "IDP") | select (.idpGroup.provider == "WORKDAY") | .teamTag'` - do - cortex teams get -t ${team} > ${team}.json - cortex catalog descriptor -y -t ${team} > ${team}.yaml - done - ------------------------------------------------------------------------------ -Delete all Workday teams ------------------------------------------------------------------------------ - -This recipe is helpful if you want to remove all Workday teams and import from scratch. - -.. code:: bash - - for team in `cortex teams list | jq -r '.teams[] | select (.type == "IDP") | select (.idpGroup.provider == "WORKDAY") | .teamTag'` - do - cortex teams delete -t ${team} - done - ----------------------------------------------------------------------------- Add provider for all group type owners where provider is not listed ----------------------------------------------------------------------------- diff --git a/cortexapps_cli/commands/backup.py b/cortexapps_cli/commands/backup.py index 3443247..fa31212 100644 --- a/cortexapps_cli/commands/backup.py +++ b/cortexapps_cli/commands/backup.py @@ -65,7 +65,7 @@ def _write_file(content, file, is_json=False): f.write(str(content) + "\n") f.close() -def _catalog(ctx, directory, catalog_types): +def _export_catalog(ctx, directory, catalog_types): directory = _directory_name(directory, "catalog") data = catalog.list_descriptors(ctx, types=catalog_types, page_size=1000, yaml="true", _print=False) @@ -176,11 +176,42 @@ def _parse_catalog_types(ctx, catalog_types): def export( ctx: typer.Context, export_types: List[str] = typer.Option(_parse_export_types("all"), "--export-types", "-e", help="some help test", callback=_parse_export_types), - catalog_types: str = typer.Option("all", "--catalog-types", "-c", help="Comma separated 
list of catalog types to export, defaults to all"), + catalog_types: str = typer.Option("all", "--catalog-types", "-c", help="Comma separated list of catalog types to export, defaults to service,team,domain plus all user-created entity-types"), directory: str = typer.Option(os.path.expanduser('~') + '/.cortex/export/' + datetime.now().strftime("%Y-%m-%d-%H-%M-%S"), "--directory", "-d", help="Location of export directory, defaults to ~/.cortex/export/-tenant"), ): """ Export tenant + + Exports the following objects: + - catalog + - entity-types + - ip-allowlist + - plugins + - scorecards + - workflows + + By default, it does not export any entities that would be created by an integration, for example AWS objects. This is because these + entities are maintained by the integration and do not need to be backed up. + + However, these entities can be export by referencing them in the catalog-types parameter, for example this command + would export all AWS S3 buckets:: + + cortex backup export --export-types catalog --catalog-types AWS::S3::Bucket + + It does not back up everything in the tenant. For example these objects are not backed up: + - api-keys + - custom-events + - custom-metadata created by the public API + - custom-metrics + - dependencies created by the API + - deploys + - docs created by the API + - entity-relationships created by the API + - groups added by the API + - packages + - secrets + + In general, if there is a bulk export API method for a Cortex object, it will be included in the export. 
""" export_types = sorted(list(set(export_types))) diff --git a/cortexapps_cli/commands/initiatives.py b/cortexapps_cli/commands/initiatives.py index b3e33bd..164bcc3 100644 --- a/cortexapps_cli/commands/initiatives.py +++ b/cortexapps_cli/commands/initiatives.py @@ -21,15 +21,24 @@ def create( """ client = ctx.obj["client"] + data = json.loads("".join([line for line in file_input])) - params = { - "dryRun": dry_run - } + client.post("api/v1/initiatives", data=data) - # remove any params that are None - params = {k: v for k, v in params.items() if v is not None} +@app.command() +def update( + ctx: typer.Context, + file_input: Annotated[typer.FileText, typer.Option(..., "--file", "-f", help="File containing JSON body of request, can be passed as stdin with -, example: -f-")] = None, + cid: str = typer.Option(..., "--cid", "-c", help="Unique Cortex ID for the initiative"), +): + """ + Update an Initiative. API key must have the Edit Initiative permission. + """ + + client = ctx.obj["client"] + data = json.loads("".join([line for line in file_input])) - client.post("api/v1/initiatives", params=params, data=input.read()) + client.put("api/v1/initiatives/" + cid, data=data) @app.command() def delete( @@ -60,7 +69,7 @@ def list( sort: ListCommandOptions.sort = [], ): """ - List initiatives + List initiatives. API key must have the View Initiatives permission. 
""" client = ctx.obj["client"] diff --git a/cortexapps_cli/commands/plugins.py b/cortexapps_cli/commands/plugins.py index 8602db9..65d8343 100644 --- a/cortexapps_cli/commands/plugins.py +++ b/cortexapps_cli/commands/plugins.py @@ -88,7 +88,6 @@ def create( data.pop("tag", None) r = client.put("api/v1/plugins/" + tag, data, raw_response=True) else: - #r = client.post("api/v1/plugins", data=file_input.read(), raw_response=True) r = client.post("api/v1/plugins", data, raw_response=True) @app.command() From 20a98db39edddf9df494eda1fcc9d2ed4361357e Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 13 Jun 2025 11:42:05 -0700 Subject: [PATCH 53/56] Update version information --- HISTORY.md | 38 +++++++++++++------------------------- 1 file changed, 13 insertions(+), 25 deletions(-) diff --git a/HISTORY.md b/HISTORY.md index ed8465e..0593ddd 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,37 +1,25 @@ Release History =============== -1.0.0 (2025-0x-yy) +1.0.0 (2025-06-13) ------------------ **Improvements** +- A complete re-write centered around the [typer library](https://typer.tiangolo.com/) and better code modularization - Added sub-commands: + - api-keys + - custom-metrics + - initiatives + - workflows +- Removed sub-commands: + - team hieararchies +- added `--table` and `--csv` options to list commands **Breaking Changes** -- custom-events -i changed to -ts -- plugins get changed to -> list, -- plugins get-by-tag changed to -> get -- resource-definitions -> entity-types - delete -ty -> delete -t - -**TODO** -- DONE: Do a full reconciliation of all flags -- DONE: Add -debug flag -- DONE: Test input files, env vars -- DONE: Add support for adding groups via JSON file? 
-- DONE: Check all get/list sub-commands - could change get-all to list, but prefer to make no change -- backup export -> don't include cloud entities -- DONE: backup import -> need to complete -- DONE: deleting existing entity types -> would be good to loop over entity types with a certain filter - base initially on name prefix - --> UPDATE: fix was to incorporate force when creating, force will delete existing entity type -- warning about using env vars -- DONE: default cortex_base_url = https://api.getcortexapp.com -- DONE: csv export -> option to not show header line, get rid of EOL - - - +- custom-events -i timestamp changed to -ts timestamp +- plugins `get` command changed to `list` +- plugins `get-by-tag` subcommand changed to `get` +- `resource-definitions` command changed to `entity-types` 0.26.6 (2024-07-30) ------------------ From cdac2d7ce7fbcb8bd3abb0ff3b28baa1951d7187 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 13 Jun 2025 11:52:44 -0700 Subject: [PATCH 54/56] Fix vuln in requests library --- data/run-time/create-dryrun.yaml | 6 ------ poetry.lock | 10 +++++----- pyproject.toml | 2 +- tests/test_catalog_dryrun.py | 14 -------------- 4 files changed, 6 insertions(+), 26 deletions(-) delete mode 100644 data/run-time/create-dryrun.yaml delete mode 100644 tests/test_catalog_dryrun.py diff --git a/data/run-time/create-dryrun.yaml b/data/run-time/create-dryrun.yaml deleted file mode 100644 index 0cf1e9d..0000000 --- a/data/run-time/create-dryrun.yaml +++ /dev/null @@ -1,6 +0,0 @@ -openapi: 3.0.0 -info: - title: Create Entity DryRun - description: Entity that should never be created; only used to test catalog dryRun - x-cortex-tag: create-entity-dryrun - x-cortex-type: service diff --git a/poetry.lock b/poetry.lock index 3ec3983..f803089 100644 --- a/poetry.lock +++ b/poetry.lock @@ -598,19 +598,19 @@ files = [ [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -720,4 +720,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.1" python-versions = "^3.11" -content-hash = "e527a86daab17d16ce5cc60269f55b4490d3fbf8663400acba9309e0f6e90daf" +content-hash = "e84a00708240cc81582242e25f476b02a336519cda76c4311449d92e590c5898" diff --git a/pyproject.toml b/pyproject.toml index af62d20..3f13bb3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,7 @@ classifiers = [ [tool.poetry.dependencies] python = "^3.11" -requests = ">= 2.32.3, < 3" +requests = "^2.32.4" pyyaml = ">= 6.0.1, < 7" urllib3 = ">= 2.2.2" typer = "^0.12.5" diff --git a/tests/test_catalog_dryrun.py b/tests/test_catalog_dryrun.py deleted file mode 100644 index 11020ab..0000000 --- a/tests/test_catalog_dryrun.py +++ /dev/null @@ -1,14 +0,0 @@ -from common import * - -def test(capsys): - cli(["-q", "catalog", "create", "-f", "data/run-time/create-dryrun.yaml", "--dry-run"]) - # Need to clear captured system output from the above commands to clear the way for the next one. - capsys.readouterr() - - # Entity should not exist. 
- with pytest.raises(SystemExit) as excinfo: - cli(["catalog", "descriptor", "-t", "create-entity-dryrun"]) - out, err = capsys.readouterr() - - assert out == "Not Found" - assert excinfo.value.code == 404 From 3d5535a413116b1b0e67d3f22f14fc6e6519901d Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 13 Jun 2025 12:34:33 -0700 Subject: [PATCH 55/56] Run apt as root --- .github/workflows/test-pr.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-pr.yml b/.github/workflows/test-pr.yml index 6287db7..536f933 100644 --- a/.github/workflows/test-pr.yml +++ b/.github/workflows/test-pr.yml @@ -37,7 +37,7 @@ jobs: - name: Install dependencies run: | - apt update && apt install just + sudo apt update && sudo apt install just python -m pip install --upgrade pip pip install poetry poetry-audit-plugin pytest-cov pytest pytest-xdist From 3fbbc05dee137e41dca01c1f6f7be8254fff3d45 Mon Sep 17 00:00:00 2001 From: Jeff Schnitter Date: Fri, 13 Jun 2025 14:06:32 -0700 Subject: [PATCH 56/56] Fix catalog patch test --- cortexapps_cli/commands/catalog.py | 10 ++++++++-- .../catalog/cli-test-patch-entity.yaml} | 9 ++++----- data/run-time/patch-entity.yaml | 2 +- tests/test_catalog_patch_entity.py | 17 +++-------------- 4 files changed, 16 insertions(+), 22 deletions(-) rename data/{run-time/create-patch-entity.yaml => import/catalog/cli-test-patch-entity.yaml} (57%) diff --git a/cortexapps_cli/commands/catalog.py b/cortexapps_cli/commands/catalog.py index b4f3ae3..dba4679 100644 --- a/cortexapps_cli/commands/catalog.py +++ b/cortexapps_cli/commands/catalog.py @@ -50,7 +50,11 @@ class CatalogCommandOptions: ] append_arrays = Annotated[ Optional[bool], - typer.Option("--append-arrays", "-aa", help="Default merge behavior is to replace arrays, set this to true to append arrays instead. 
For simple types, duplicate values will be removed from the merged array", show_default=False) + typer.Option("--append-arrays", "-a", help="Default merge behavior is to replace arrays, set this to true to append arrays instead. For simple types, duplicate values will be removed from the merged array", show_default=False) + ] + fail_if_not_exist = Annotated[ + Optional[bool], + typer.Option("--fail-if-not-exist", "-n", help="Default behavior is to upsert the entity, if set command will fail (404) if the entity specified in x-cortex-tag does not exist.", show_default=False) ] git_repositories = Annotated[ Optional[str], @@ -292,6 +296,7 @@ def patch( delete_marker_value = typer.Option("__delete__", "--delete-marker-value", "-dmv", help="Delete keys with this value from the merged yaml, defaults to __delete__, if any values match this, they will not be included in merged YAML. For example my_value: __delete__ will remove my_value from the merged YAML."), dry_run: CatalogCommandOptions.dry_run = False, append_arrays: CatalogCommandOptions.append_arrays = False, + fail_if_not_exist: CatalogCommandOptions.fail_if_not_exist = False, ): """ Creates or updates an entity. 
If the YAML refers to an entity that already exists (as referenced by the x-cortex-tag), this API will merge the specified changes into the existing entity @@ -301,7 +306,8 @@ def patch( params = { "dryRun":dry_run, "appendArrays": append_arrays, - "deleteMarkerValue": delete_marker_value + "deleteMarkerValue": delete_marker_value, + "failIfEntityDoesNotExist": fail_if_not_exist } r = client.patch("api/v1/open-api", data=file_input.read(), params=params, content_type="application/openapi;charset=UTF-8") diff --git a/data/run-time/create-patch-entity.yaml b/data/import/catalog/cli-test-patch-entity.yaml similarity index 57% rename from data/run-time/create-patch-entity.yaml rename to data/import/catalog/cli-test-patch-entity.yaml index 914b459..1bedce1 100644 --- a/data/run-time/create-patch-entity.yaml +++ b/data/import/catalog/cli-test-patch-entity.yaml @@ -1,12 +1,11 @@ openapi: 3.0.0 info: - title: Patch Entity + title: CLI Test Patch Entity description: Entity that will be created to test catalog patch entity - x-cortex-tag: patch-entity - x-cortex-type: component + x-cortex-tag: cli-test-patch-entity + x-cortex-type: service x-cortex-groups: - - public-api-test - x-cortex-definition: {} + - cli-test x-cortex-custom-metadata: owners: - owner-1 diff --git a/data/run-time/patch-entity.yaml b/data/run-time/patch-entity.yaml index 5e7b4c4..95a5c5b 100644 --- a/data/run-time/patch-entity.yaml +++ b/data/run-time/patch-entity.yaml @@ -1,6 +1,6 @@ openapi: 3.0.0 info: - x-cortex-tag: patch-entity + x-cortex-tag: cli-test-patch-entity x-cortex-custom-metadata: owners: - owner-2 diff --git a/tests/test_catalog_patch_entity.py b/tests/test_catalog_patch_entity.py index bf967a1..70c76a6 100644 --- a/tests/test_catalog_patch_entity.py +++ b/tests/test_catalog_patch_entity.py @@ -1,18 +1,7 @@ -from common import * +from tests.helpers.utils import * def test(capsys): - cli(["-q", "catalog", "patch", "-f", "data/run-time/create-patch-entity.yaml"]) - # Need to clear captured 
system output from the above commands to clear the way for the next one. - capsys.readouterr() + cli(["catalog", "patch", "-a", "-f", "data/run-time/patch-entity.yaml"]) - response = cli_command(capsys, ["catalog", "descriptor", "-t", "patch-entity"]) - assert response['info']['x-cortex-tag'] == "patch-entity" - - # Need to clear captured system output from the above commands to clear the way for the next one. - capsys.readouterr() - - cli(["-q", "catalog", "patch", "-a", "-f", "data/run-time/patch-entity.yaml"]) - capsys.readouterr() - - response = cli_command(capsys, ["custom-data", "get", "-t", "patch-entity", "-k", "owners"]) + response = cli(["custom-data", "get", "-t", "cli-test-patch-entity", "-k", "owners"]) assert 'owner-2' in response['value'], "owner-2 should have been merged in owners array"