diff --git a/.github/workflows/python-mutation-tests.yml b/.github/workflows/python-mutation-tests.yml
index a09885535..2335e69fb 100644
--- a/.github/workflows/python-mutation-tests.yml
+++ b/.github/workflows/python-mutation-tests.yml
@@ -36,8 +36,8 @@ jobs:
           poetry install
       - name: Run mutation tests
+        # Todo: Drop the --force, it's just here for easy local copy and paste
         run: |
-          # Todo: Drop the --force, it just here for easy local copy and paste
           poetry run cosmic-ray init cosmic-ray.toml cosmic-ray.sqlite --force
           echo "Initialised"
           poetry run cosmic-ray --verbosity INFO exec cosmic-ray.toml cosmic-ray.sqlite
@@ -72,7 +72,10 @@ jobs:
           poetry install
       - name: Run mutation tests
+        # Todo: Remove hack to copy dbt_platform_helper before running clean tests
         run: |
+          mkdir mutants
+          cp -R dbt_platform_helper mutants
           poetry run mutmut run
       - name: Report results
diff --git a/tests/dbt_platform_helper/COMMANDS.md b/tests/dbt_platform_helper/COMMANDS.md
new file mode 100644
index 000000000..f6370d576
--- /dev/null
+++ b/tests/dbt_platform_helper/COMMANDS.md
@@ -0,0 +1,890 @@
+# Commands Reference
+
+- [platform-helper](#platform-helper)
+- [platform-helper application](#platform-helper-application)
+- [platform-helper application container-stats](#platform-helper-application-container-stats)
+- [platform-helper application task-stats](#platform-helper-application-task-stats)
+- [platform-helper codebase](#platform-helper-codebase)
+- [platform-helper codebase prepare](#platform-helper-codebase-prepare)
+- [platform-helper codebase list](#platform-helper-codebase-list)
+- [platform-helper codebase build](#platform-helper-codebase-build)
+- [platform-helper codebase deploy](#platform-helper-codebase-deploy)
+- [platform-helper conduit](#platform-helper-conduit)
+- [platform-helper config](#platform-helper-config)
+- [platform-helper config validate](#platform-helper-config-validate)
+- [platform-helper config aws](#platform-helper-config-aws)
+- [platform-helper copilot](#platform-helper-copilot)
+- [platform-helper copilot make-addons](#platform-helper-copilot-make-addons)
+- [platform-helper environment](#platform-helper-environment)
+- [platform-helper environment offline](#platform-helper-environment-offline)
+- [platform-helper environment online](#platform-helper-environment-online)
+- [platform-helper environment generate](#platform-helper-environment-generate)
+- [platform-helper environment generate-terraform](#platform-helper-environment-generate-terraform)
+- [platform-helper generate](#platform-helper-generate)
+- [platform-helper pipeline](#platform-helper-pipeline)
+- [platform-helper pipeline generate](#platform-helper-pipeline-generate)
+- [platform-helper secrets](#platform-helper-secrets)
+- [platform-helper secrets copy](#platform-helper-secrets-copy)
+- [platform-helper secrets list](#platform-helper-secrets-list)
+- [platform-helper notify](#platform-helper-notify)
+- [platform-helper notify environment-progress](#platform-helper-notify-environment-progress)
+- [platform-helper notify add-comment](#platform-helper-notify-add-comment)
+- [platform-helper database](#platform-helper-database)
+- [platform-helper database dump](#platform-helper-database-dump)
+- [platform-helper database load](#platform-helper-database-load)
+- [platform-helper database copy](#platform-helper-database-copy)
+- [platform-helper version](#platform-helper-version)
+- [platform-helper version get-platform-helper-for-project](#platform-helper-version-get-platform-helper-for-project)
+
+# platform-helper
+
+## Usage
+
+```
+platform-helper [--version]
+```
+
+## Options
+
+- `--version <boolean>` _Defaults to False._
+  - Show the version and exit.
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`application` ↪](#platform-helper-application)
+- [`codebase` ↪](#platform-helper-codebase)
+- [`conduit` ↪](#platform-helper-conduit)
+- [`config` ↪](#platform-helper-config)
+- [`copilot` ↪](#platform-helper-copilot)
+- [`database` ↪](#platform-helper-database)
+- [`environment` ↪](#platform-helper-environment)
+- [`generate` ↪](#platform-helper-generate)
+- [`notify` ↪](#platform-helper-notify)
+- [`pipeline` ↪](#platform-helper-pipeline)
+- [`secrets` ↪](#platform-helper-secrets)
+- [`version` ↪](#platform-helper-version)
+
+# platform-helper application
+
+[↩ Parent](#platform-helper)
+
+[DEPRECATED] Application metrics.
+
+## Usage
+
+```
+platform-helper application (container-stats|task-stats)
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`container-stats` ↪](#platform-helper-application-container-stats)
+- [`task-stats` ↪](#platform-helper-application-task-stats)
+
+# platform-helper application container-stats
+
+[↩ Parent](#platform-helper-application)
+
+[DEPRECATED] Command to get application container level metrics.
+
+## Usage
+
+```
+platform-helper application container-stats --env <environment> --app <application>
+                                            [--storage] [--network]
+```
+
+## Options
+
+- `--env <text>`
+
+- `--app <text>`
+
+- `--storage <boolean>` _Defaults to False._
+
+- `--network <boolean>` _Defaults to False._
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper application task-stats
+
+[↩ Parent](#platform-helper-application)
+
+[DEPRECATED] Command to get application task level metrics.
+
+## Usage
+
+```
+platform-helper application task-stats --env <environment> --app <application> [--disk]
+                                       [--storage] [--network]
+```
+
+## Options
+
+- `--env <text>`
+
+- `--app <text>`
+
+- `--disk <boolean>` _Defaults to False._
+
+- `--storage <boolean>` _Defaults to False._
+
+- `--network <boolean>` _Defaults to False._
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper codebase
+
+[↩ Parent](#platform-helper)
+
+Codebase commands.
+
+## Usage
+
+```
+platform-helper codebase (prepare|list|build|deploy)
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`build` ↪](#platform-helper-codebase-build)
+- [`deploy` ↪](#platform-helper-codebase-deploy)
+- [`list` ↪](#platform-helper-codebase-list)
+- [`prepare` ↪](#platform-helper-codebase-prepare)
+
+# platform-helper codebase prepare
+
+[↩ Parent](#platform-helper-codebase)
+
+Sets up an application codebase for use within a DBT platform project.
+
+## Usage
+
+```
+platform-helper codebase prepare
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper codebase list
+
+[↩ Parent](#platform-helper-codebase)
+
+List available codebases for the application.
+
+## Usage
+
+```
+platform-helper codebase list --app <application> [--with-images]
+```
+
+## Options
+
+- `--app <text>`
+  - AWS application name
+- `--with-images <boolean>` _Defaults to False._
+  - List up to the last 10 images tagged for this codebase
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper codebase build
+
+[↩ Parent](#platform-helper-codebase)
+
+Trigger a CodePipeline pipeline based build.
+
+## Usage
+
+```
+platform-helper codebase build --app <application> --codebase <codebase>
+                               --commit <commit>
+```
+
+## Options
+
+- `--app <text>`
+  - AWS application name
+- `--codebase <text>`
+  - The codebase name as specified in the platform-config.yml file
+- `--commit <text>`
+  - GitHub commit hash
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper codebase deploy
+
+[↩ Parent](#platform-helper-codebase)
+
+## Usage
+
+```
+platform-helper codebase deploy --app <application> --env <environment> --codebase <codebase>
+                                --commit <commit>
+```
+
+## Options
+
+- `--app <text>`
+  - AWS application name
+- `--env <text>`
+  - AWS Copilot environment
+- `--codebase <text>`
+  - The codebase name as specified in the platform-config.yml file
+- `--commit <text>`
+  - GitHub commit hash
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper conduit
+
+[↩ Parent](#platform-helper)
+
+Create a conduit connection to an addon.
+
+## Usage
+
+```
+platform-helper conduit <addon_name>
+                        --app <application> --env <environment> [--access (read|write|admin)]
+```
+
+## Arguments
+
+- `addon_name <text>`
+
+## Options
+
+- `--app <text>`
+  - AWS application name
+- `--env <text>`
+  - AWS environment name
+- `--access <choice>` _Defaults to read._
+  - Allow write or admin access to database addons
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper config
+
+[↩ Parent](#platform-helper)
+
+Perform actions on configuration files.
+
+## Usage
+
+```
+platform-helper config (validate|aws)
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`aws` ↪](#platform-helper-config-aws)
+- [`validate` ↪](#platform-helper-config-validate)
+
+# platform-helper config validate
+
+[↩ Parent](#platform-helper-config)
+
+Validate deployment or application configuration.
+
+## Usage
+
+```
+platform-helper config validate
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper config aws
+
+[↩ Parent](#platform-helper-config)
+
+Writes a local config file containing all the AWS profiles to which the
+logged in user has access.
+
+If no `--file-path` is specified, defaults to `~/.aws/config`.
+
+## Usage
+
+```
+platform-helper config aws [--file-path <file_path>]
+```
+
+## Options
+
+- `--file-path
+-fp <text>` _Defaults to ~/.aws/config._
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper copilot
+
+[↩ Parent](#platform-helper)
+
+## Usage
+
+```
+platform-helper copilot make-addons
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`make-addons` ↪](#platform-helper-copilot-make-addons)
+
+# platform-helper copilot make-addons
+
+[↩ Parent](#platform-helper-copilot)
+
+Generate addons CloudFormation for each environment.
+
+## Usage
+
+```
+platform-helper copilot make-addons
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper environment
+
+[↩ Parent](#platform-helper)
+
+Commands affecting environments.
+
+## Usage
+
+```
+platform-helper environment (offline|online|generate|generate-terraform)
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`generate` ↪](#platform-helper-environment-generate)
+- [`generate-terraform` ↪](#platform-helper-environment-generate-terraform)
+- [`offline` ↪](#platform-helper-environment-offline)
+- [`online` ↪](#platform-helper-environment-online)
+
+# platform-helper environment offline
+
+[↩ Parent](#platform-helper-environment)
+
+Take load-balanced web services offline with a maintenance page.
+
+## Usage
+
+```
+platform-helper environment offline --app <application> --env <environment> --svc <service>
+                                    [--template (default|migration|dmas-migration)]
+                                    [--vpc <vpc>]
+```
+
+## Options
+
+- `--app <text>`
+
+- `--env <text>`
+
+- `--svc <text>` _Defaults to ['web']._
+
+- `--template <choice>` _Defaults to default._
+  - The maintenance page you wish to put up.
+- `--vpc <text>`
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper environment online
+
+[↩ Parent](#platform-helper-environment)
+
+Remove a maintenance page from an environment.
+
+## Usage
+
+```
+platform-helper environment online --app <application> --env <environment>
+```
+
+## Options
+
+- `--app <text>`
+
+- `--env <text>`
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper environment generate
+
+[↩ Parent](#platform-helper-environment)
+
+## Usage
+
+```
+platform-helper environment generate --name <name> [--vpc-name <vpc_name>]
+```
+
+## Options
+
+- `--vpc-name <text>`
+
+- `--name
+-n <text>`
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper environment generate-terraform
+
+[↩ Parent](#platform-helper-environment)
+
+Generate terraform manifest for the specified environment.
+
+## Usage
+
+```
+platform-helper environment generate-terraform --name <name> [--terraform-platform-modules-version <terraform_platform_modules_version>]
+```
+
+## Options
+
+- `--name
+-n <text>`
+  - The name of the environment to generate a manifest for.
+- `--terraform-platform-modules-version <text>`
+  - Override the default version of terraform-platform-modules. (Default version is '5').
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper generate
+
+[↩ Parent](#platform-helper)
+
+Generate deployment pipeline configuration files and generate addons
+CloudFormation template files for each environment.
+
+Wraps pipeline generate and make-addons.
+
+## Usage
+
+```
+platform-helper generate
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper pipeline
+
+[↩ Parent](#platform-helper)
+
+Pipeline commands.
+
+## Usage
+
+```
+platform-helper pipeline generate
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`generate` ↪](#platform-helper-pipeline-generate)
+
+# platform-helper pipeline generate
+
+[↩ Parent](#platform-helper-pipeline)
+
+Given a platform-config.yml file, generate environment and service
+deployment pipelines.
+
+This command does the following in relation to the environment pipelines:
+- Reads contents of `platform-config.yml/environment-pipelines` configuration.
+The `terraform/environment-pipelines/<aws_account>/main.tf` file is generated using this configuration.
+The `main.tf` file is then used to generate Terraform for creating an environment pipeline resource.
+
+This command does the following in relation to the codebase pipelines:
+- Generates the copilot pipeline manifest.yml for copilot/pipelines/<codebase_name>
+
+(Deprecated) This command does the following for non terraform projects (legacy AWS Copilot):
+- Generates the copilot manifest.yml for copilot/environments/<environment>
+
+## Usage
+
+```
+platform-helper pipeline generate [--terraform-platform-modules-version <terraform_platform_modules_version>]
+                                  [--deploy-branch <deploy_branch>]
+```
+
+## Options
+
+- `--terraform-platform-modules-version <text>`
+  - Override the default version of terraform-platform-modules with a specific version or branch.
+Precedence of version used is version supplied via CLI, then the version found in
+platform-config.yml/default_versions/terraform-platform-modules.
+In absence of these inputs, defaults to version '5'.
+- `--deploy-branch <text>`
+  - Specify the branch of <application>-deploy used to configure the source stage in the environment-pipeline resource.
+This is generated from the terraform/environment-pipelines/<aws_account>/main.tf file.
+(Default <application>-deploy branch is specified in
+<application>-deploy/platform-config.yml/environment_pipelines/<environment_pipeline>/branch).
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper secrets
+
+[↩ Parent](#platform-helper)
+
+## Usage
+
+```
+platform-helper secrets (copy|list)
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`copy` ↪](#platform-helper-secrets-copy)
+- [`list` ↪](#platform-helper-secrets-list)
+
+# platform-helper secrets copy
+
+[↩ Parent](#platform-helper-secrets)
+
+Copy secrets from one environment to a new environment.
+
+## Usage
+
+```
+platform-helper secrets copy <source_environment> <target_environment>
+                             --project-profile <project_profile>
+```
+
+## Arguments
+
+- `source_environment <text>`
+- `target_environment <text>`
+
+## Options
+
+- `--project-profile <text>`
+  - AWS account profile name
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper secrets list
+
+[↩ Parent](#platform-helper-secrets)
+
+List secret names and values for an environment.
+
+## Usage
+
+```
+platform-helper secrets list <app> <env>
+```
+
+## Arguments
+
+- `app <text>`
+- `env <text>`
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper notify
+
+[↩ Parent](#platform-helper)
+
+Send Slack notifications
+
+## Usage
+
+```
+platform-helper notify (environment-progress|add-comment)
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`add-comment` ↪](#platform-helper-notify-add-comment)
+- [`environment-progress` ↪](#platform-helper-notify-environment-progress)
+
+# platform-helper notify environment-progress
+
+[↩ Parent](#platform-helper-notify)
+
+Send environment progress notifications
+
+## Usage
+
+```
+platform-helper notify environment-progress <slack_channel_id> <slack_token>
+                                            <message> [--build-arn <build_arn>]
+                                            [--repository <repository>]
+                                            [--commit-sha <commit_sha>]
+                                            [--slack-ref <slack_ref>]
+```
+
+## Arguments
+
+- `slack-channel-id <text>`
+- `slack-token <text>`
+- `message <text>`
+
+## Options
+
+- `--build-arn <text>`
+
+- `--repository <text>`
+
+- `--commit-sha <text>`
+
+- `--slack-ref <text>`
+  - Slack message reference
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
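+
+As an illustrative sketch only (the channel ID, token variable, message, and commit SHA below are hypothetical), the command might be driven from a deployment script like so:
+
+```python
+import os
+import subprocess
+
+subprocess.run(
+    [
+        "platform-helper", "notify", "environment-progress",
+        "C0123456789",              # slack-channel-id (hypothetical)
+        os.environ["SLACK_TOKEN"],  # slack-token, e.g. sourced from Parameter Store
+        "Deploying demodjango to staging",  # message
+        "--commit-sha", "abc1234",
+    ],
+    check=True,
+)
+```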
+
+# platform-helper notify add-comment
+
+[↩ Parent](#platform-helper-notify)
+
+Add comment to a notification
+
+## Usage
+
+```
+platform-helper notify add-comment <slack_channel_id> <slack_token>
+                                   <slack_ref> <message>
+                                   [--title <title>] [--send-to-main-channel <send_to_main_channel>]
+```
+
+## Arguments
+
+- `slack-channel-id <text>`
+- `slack-token <text>`
+- `slack-ref <text>`
+- `message <text>`
+
+## Options
+
+- `--title <text>`
+  - Message title
+- `--send-to-main-channel <boolean>` _Defaults to False._
+  - Send to main channel
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper database
+
+[↩ Parent](#platform-helper)
+
+Commands to copy data between databases.
+
+## Usage
+
+```
+platform-helper database (dump|load|copy)
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`copy` ↪](#platform-helper-database-copy)
+- [`dump` ↪](#platform-helper-database-dump)
+- [`load` ↪](#platform-helper-database-load)
+
+# platform-helper database dump
+
+[↩ Parent](#platform-helper-database)
+
+Dump a database into an S3 bucket.
+
+## Usage
+
+```
+platform-helper database dump --from <from_env> --database <database>
+                              [--app <application>] [--from-vpc <from_vpc>]
+```
+
+## Options
+
+- `--app <text>`
+  - The application name. Required unless you are running the command from your deploy repo
+- `--from <text>`
+  - The environment you are dumping data from
+- `--database <text>`
+  - The name of the database you are dumping data from
+- `--from-vpc <text>`
+  - The vpc the specified environment is running in. Required unless you are running the command from your deploy repo
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper database load
+
+[↩ Parent](#platform-helper-database)
+
+Load a database from an S3 bucket.
+
+## Usage
+
+```
+platform-helper database load --to <to_env> --database <database>
+                              [--app <application>] [--to-vpc <to_vpc>]
+                              [--auto-approve]
+```
+
+## Options
+
+- `--app <text>`
+  - The application name. Required unless you are running the command from your deploy repo
+- `--to <text>`
+  - The environment you are loading data into
+- `--database <text>`
+  - The name of the database you are loading data into
+- `--to-vpc <text>`
+  - The vpc the specified environment is running in. Required unless you are running the command from your deploy repo
+- `--auto-approve <boolean>` _Defaults to False._
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper database copy
+
+[↩ Parent](#platform-helper-database)
+
+Copy a database between environments.
+
+## Usage
+
+```
+platform-helper database copy --from <from_env> --to <to_env> --database <database>
+                              --svc <service> [--app <application>] [--from-vpc <from_vpc>]
+                              [--to-vpc <to_vpc>] [--template (default|migration|dmas-migration)]
+                              [--auto-approve] [--no-maintenance-page]
+```
+
+## Options
+
+- `--app <text>`
+  - The application name. Required unless you are running the command from your deploy repo
+- `--from <text>`
+  - The environment you are copying data from
+- `--to <text>`
+  - The environment you are copying data into
+- `--database <text>`
+  - The name of the database you are copying
+- `--from-vpc <text>`
+  - The vpc the environment you are copying from is running in. Required unless you are running the command from your deploy repo
+- `--to-vpc <text>`
+  - The vpc the environment you are copying into is running in.
Required unless you are running the command from your deploy repo
+- `--auto-approve <boolean>` _Defaults to False._
+
+- `--svc <text>` _Defaults to ['web']._
+
+- `--template <choice>` _Defaults to default._
+  - The maintenance page you wish to put up.
+- `--no-maintenance-page <boolean>` _Defaults to False._
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+# platform-helper version
+
+[↩ Parent](#platform-helper)
+
+Contains subcommands for getting version information about the
+current project.
+
+## Usage
+
+```
+platform-helper version get-platform-helper-for-project
+```
+
+## Options
+
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
+
+## Commands
+
+- [`get-platform-helper-for-project` ↪](#platform-helper-version-get-platform-helper-for-project)
+
+# platform-helper version get-platform-helper-for-project
+
+[↩ Parent](#platform-helper-version)
+
+Print the version of platform-tools required by the current project
+
+## Usage
+
+```
+platform-helper version get-platform-helper-for-project [--pipeline <pipeline>]
+```
+
+## Options
+
+- `--pipeline <text>`
+  - Take into account platform-tools version overrides in the specified pipeline
+- `--help <boolean>` _Defaults to False._
+  - Show this message and exit.
diff --git a/tests/dbt_platform_helper/README.md b/tests/dbt_platform_helper/README.md
new file mode 100644
index 000000000..ec58ab474
--- /dev/null
+++ b/tests/dbt_platform_helper/README.md
@@ -0,0 +1,52 @@
+# DBT Platform Helper
+
+This package contains a set of tools in the form of a Command Line Interface (CLI) primarily for automating operations used when working with the Department for Business and Trade (DBT) Platform.
+
+## Getting started
+
+To use the Python package `dbt-platform-helper`, follow the steps below.
+
+### Installation
+
+```shell
+pip install dbt-platform-helper
+```
+
+### Usage
+
+Check that `dbt-platform-helper` has installed successfully by running `platform-helper` in your terminal. You should see output similar to the following:
+
+```shell
+$ platform-helper
+Usage: platform-helper [OPTIONS] COMMAND [ARGS]...
+
+Options:
+  --version  Show the version and exit.
+  --help     Show this message and exit.
+
+Commands:
+  bootstrap
+  check-cloudformation  Runs the checks passed in the command arguments.
+  codebuild
+  copilot
+  domain
+```
+
+Run any command without arguments or subcommands to display its help message.
+
+Below is the output for the `bootstrap` command as of version `0.1.2`.
+
+```shell
+$ platform-helper bootstrap --help
+Usage: platform-helper bootstrap [OPTIONS] COMMAND [ARGS]...
+
+Options:
+  --help  Show this message and exit.
+
+Commands:
+  instructions     Show migration instructions.
+  make-config      Generate copilot boilerplate code.
+  migrate-secrets  Migrate secrets from your gov paas application to...
+```
+
+See the [Commands Reference](https://github.com/uktrade/platform-tools/blob/main/dbt_platform_helper/COMMANDS.md) for a list of all available subcommands.
diff --git a/tests/dbt_platform_helper/__init__.py b/tests/dbt_platform_helper/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/dbt_platform_helper/addons-template-map.yml b/tests/dbt_platform_helper/addons-template-map.yml
new file mode 100644
index 000000000..eb9d9b610
--- /dev/null
+++ b/tests/dbt_platform_helper/addons-template-map.yml
@@ -0,0 +1,29 @@
+# This file maps addon types to svc level templates
+
+# explanation:
+
+# {addons-type}:
+#   svc:
+#     - template: path/to/template.yml
+
+redis: {}
+postgres: {}
+opensearch: {}
+s3:
+  svc:
+    - template: addons/svc/s3-policy.yml
+s3-policy:
+  svc:
+    - template: addons/svc/s3-policy.yml
+appconfig-ipfilter:
+  svc:
+    - template: addons/svc/appconfig-ipfilter.yml
+subscription-filter:
+  svc:
+    - template: addons/svc/subscription-filter.yml
+monitoring: {}
+vpc: {}
+alb: {}
+prometheus-policy:
+  svc:
+    - template: addons/svc/prometheus-policy.yml
diff --git a/tests/dbt_platform_helper/constants.py b/tests/dbt_platform_helper/constants.py
new file mode 100644
index 000000000..d7d1ed649
--- /dev/null
+++ b/tests/dbt_platform_helper/constants.py
@@ -0,0 +1,6 @@
+PLATFORM_CONFIG_FILE = "platform-config.yml"
+PLATFORM_HELPER_VERSION_FILE = ".platform-helper-version"
+CODEBASE_PIPELINES_KEY = "codebase_pipelines"
+ENVIRONMENTS_KEY = "environments"
+DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION = "5"
+PLATFORM_HELPER_CACHE_FILE = ".platform-helper-config-cache.yml"
diff --git a/tests/dbt_platform_helper/default-extensions.yml b/tests/dbt_platform_helper/default-extensions.yml
new file mode 100644
index 000000000..ad140efd8
--- /dev/null
+++ b/tests/dbt_platform_helper/default-extensions.yml
@@ -0,0 +1,26 @@
+# rules in this file are applied by default
+
+# Add the IP filter AppConfig policy to every service
+appconfig-ipfilter:
+  type: appconfig-ipfilter
+  services: __all__
+
+subscription-filter:
+  type: subscription-filter
+  services: __all__
+
+vpc:
+  type: vpc
+
+monitoring:
+  type: monitoring
+  environments:
+    "*":
+      enable_ops_center: false
+
+prometheus:
+  type: prometheus-policy
+  services: __all__
+  environments:
+    "*":
+      role_arn: arn:aws:iam::480224066791:role/amp-prometheus-role
diff --git a/tests/dbt_platform_helper/jinja2_tags.py b/tests/dbt_platform_helper/jinja2_tags.py
new file mode 100644
index 000000000..5ca9304dd
--- /dev/null
+++ b/tests/dbt_platform_helper/jinja2_tags.py
@@ -0,0 +1,20 @@
+import datetime
+from importlib.metadata import version
+
+from jinja2_simple_tags import StandaloneTag
+
+
+class VersionTag(StandaloneTag):
+    tags = {"version_info"}
+
+    def render(self):
+        time_format = "%Y-%m-%d %H:%M:%S"
+        time = datetime.datetime.now().strftime(time_format)
+        return f"Generated by platform-helper {version('dbt-platform-helper')} / {time}"
+
+
+class ExtraHeaderTag(StandaloneTag):
+    tags = {"extra_header"}
+
+    def render(self):
+        return "WARNING: This is an autogenerated file, not for manual editing."
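+
+
+if __name__ == "__main__":
+    # Illustrative sketch, not part of the package API: StandaloneTag subclasses
+    # are ordinary Jinja2 extensions, so registering them lets a template emit
+    # the generated header lines used by the addon templates below.
+    from jinja2 import Environment
+
+    env = Environment(extensions=[ExtraHeaderTag, VersionTag])
+    print(env.from_string("# {% extra_header %}\n# {% version_info %}").render())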
diff --git a/tests/dbt_platform_helper/providers/load_balancers.py b/tests/dbt_platform_helper/providers/load_balancers.py
new file mode 100644
index 000000000..7be823ed6
--- /dev/null
+++ b/tests/dbt_platform_helper/providers/load_balancers.py
@@ -0,0 +1,51 @@
+import boto3
+
+
+def find_load_balancer(session: boto3.Session, app: str, env: str) -> str:
+    lb_client = session.client("elbv2")
+
+    describe_response = lb_client.describe_load_balancers()
+    load_balancer_arns = [lb["LoadBalancerArn"] for lb in describe_response["LoadBalancers"]]
+
+    # Match the load balancer by its Copilot application/environment tags
+    tag_descriptions = lb_client.describe_tags(ResourceArns=load_balancer_arns)["TagDescriptions"]
+
+    load_balancer_arn = None
+    for lb in tag_descriptions:
+        tags = {t["Key"]: t["Value"] for t in lb["Tags"]}
+        if tags.get("copilot-application") == app and tags.get("copilot-environment") == env:
+            load_balancer_arn = lb["ResourceArn"]
+
+    if not load_balancer_arn:
+        raise LoadBalancerNotFoundError()
+
+    return load_balancer_arn
+
+
+def find_https_listener(session: boto3.Session, app: str, env: str) -> str:
+    load_balancer_arn = find_load_balancer(session, app, env)
+    lb_client = session.client("elbv2")
+    listeners = lb_client.describe_listeners(LoadBalancerArn=load_balancer_arn)["Listeners"]
+
+    listener_arn = next(
+        (listener["ListenerArn"] for listener in listeners if listener["Protocol"] == "HTTPS"),
+        None,
+    )
+
+    if not listener_arn:
+        raise ListenerNotFoundError()
+
+    return listener_arn
+
+
+class LoadBalancerNotFoundError(Exception):
+    pass
+
+
+class ListenerNotFoundError(Exception):
+    pass
+
+
+class ListenerRuleNotFoundError(Exception):
+    pass
diff --git a/tests/dbt_platform_helper/templates/.copilot/config.yml b/tests/dbt_platform_helper/templates/.copilot/config.yml
new file mode 100644
index 000000000..510ae2ae0
--- /dev/null
+++ b/tests/dbt_platform_helper/templates/.copilot/config.yml
@@ -0,0 +1,7 @@
+repository: {{ repository }}
+builder:
+  name: paketobuildpacks/builder-jammy-base
+  version: {{ builder_version }}
+packages:
+  - libpq-dev
+  - libsqlite3-dev
diff --git a/tests/dbt_platform_helper/templates/.copilot/image_build_run.sh b/tests/dbt_platform_helper/templates/.copilot/image_build_run.sh
new file mode 100755
index 000000000..8ac7a3906
--- /dev/null
+++ b/tests/dbt_platform_helper/templates/.copilot/image_build_run.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+# Exit early if something goes wrong
+set -e
+
+# Add commands below to run inside the container after all the other buildpacks have been applied
diff --git a/tests/dbt_platform_helper/templates/.copilot/phases/build.sh b/tests/dbt_platform_helper/templates/.copilot/phases/build.sh
new file mode 100755
index 000000000..c1a73f284
--- /dev/null
+++ b/tests/dbt_platform_helper/templates/.copilot/phases/build.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+# Exit early if something goes wrong
+set -e
+
+# Add commands below to run as part of the build phase
diff --git a/tests/dbt_platform_helper/templates/.copilot/phases/install.sh b/tests/dbt_platform_helper/templates/.copilot/phases/install.sh
new file mode 100755
index 000000000..17794e84a
--- /dev/null
+++ b/tests/dbt_platform_helper/templates/.copilot/phases/install.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+# Exit early if something goes wrong
+set -e
+
+# Add commands below to run as part of the install phase
diff --git a/tests/dbt_platform_helper/templates/.copilot/phases/post_build.sh b/tests/dbt_platform_helper/templates/.copilot/phases/post_build.sh
new file mode 100755
index 000000000..1676bb5b3
---
/dev/null +++ b/tests/dbt_platform_helper/templates/.copilot/phases/post_build.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +# Exit early if something goes wrong +set -e + +# Add commands below to run as part of the post_build phase diff --git a/tests/dbt_platform_helper/templates/.copilot/phases/pre_build.sh b/tests/dbt_platform_helper/templates/.copilot/phases/pre_build.sh new file mode 100755 index 000000000..18b19394e --- /dev/null +++ b/tests/dbt_platform_helper/templates/.copilot/phases/pre_build.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +# Exit early if something goes wrong +set -e + +if [ -f "./.gitmodules" ]; then + echo ".gitmodules file exists. Modifying URLs..." + account_id=$(echo $CODESTAR_CONNECTION_ARN | cut -d':' -f5) + connection_id=$(echo $CODESTAR_CONNECTION_ARN | cut -d'/' -f2) + git_clone_base_url="https://codestar-connections.eu-west-2.amazonaws.com/git-http/$account_id/eu-west-2/$connection_id/uktrade" + + git config --global credential.helper '!aws codecommit credential-helper $@' + git config --global credential.UseHttpPath true + + sed -i "s|url = git@github.com:uktrade/\(.*\).git|url = $git_clone_base_url/\1.git|g" ./.gitmodules + + git submodule update --init --remote --recursive + +else + echo ".gitmodules file does not exist. No URLs to update." +fi + +# Add commands below to run as part of the pre_build phase diff --git a/tests/dbt_platform_helper/templates/addons/README.md b/tests/dbt_platform_helper/templates/addons/README.md new file mode 100644 index 000000000..4f9181220 --- /dev/null +++ b/tests/dbt_platform_helper/templates/addons/README.md @@ -0,0 +1,9 @@ +# Addons + +Addons can exist at the service level (e.g. `copilot/web/addons/some-addon.yml`) or at the environment level (e.g. `copilot/environments/addons/some-addon.yml`). + +We mostly use environment addons because service addons are deleted when you delete the service, which would be bad for things like a database. + +Exceptions to the above include: + +* `s3-policy.yml` which needs to be attached to the service diff --git a/tests/dbt_platform_helper/templates/addons/svc/appconfig-ipfilter.yml b/tests/dbt_platform_helper/templates/addons/svc/appconfig-ipfilter.yml new file mode 100644 index 000000000..89959b2ad --- /dev/null +++ b/tests/dbt_platform_helper/templates/addons/svc/appconfig-ipfilter.yml @@ -0,0 +1,29 @@ +# {% extra_header %} +# {% version_info %} +Parameters: + App: + Type: String + Description: Your application's name. + Env: + Type: String + Description: The environment name your service, job, or workflow is being deployed to. + Name: + Type: String + Description: The name of the service, job, or workflow being deployed. 
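+# The managed policy below lets the service's task role assume the central
+# AppConfig IP filter role in the tooling account.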
+Resources: + appConfigAccessPolicy: + Metadata: + 'aws:copilot:description': 'An IAM ManagedPolicy for your service to assume the AppConfig role from the tooling account' + Type: AWS::IAM::ManagedPolicy + Properties: + Description: Allows the service to assume the AppConfig role + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: sts:AssumeRole + Resource: "arn:aws:iam::763451185160:role/AppConfigIpFilterRole" +Outputs: + appConfigAccessPolicy: + Description: "The IAM::ManagedPolicy to attach to the task role" + Value: !Ref appConfigAccessPolicy diff --git a/tests/dbt_platform_helper/templates/addons/svc/prometheus-policy.yml b/tests/dbt_platform_helper/templates/addons/svc/prometheus-policy.yml new file mode 100644 index 000000000..df1505249 --- /dev/null +++ b/tests/dbt_platform_helper/templates/addons/svc/prometheus-policy.yml @@ -0,0 +1,34 @@ +Parameters: + App: + Type: String + Description: Your application's name. + Env: + Type: String + Description: The environment name your service, job, or workflow is being deployed to. + Name: + Type: String + Description: The name of the service, job, or workflow being deployed. + +Mappings: + {{ addon_config.prefix }}EnvironmentConfigMap: + # Create an entry for each environment +{% for env_name, config in addon_config.environments.items() %} + {{ env_name }}: + RoleArn: '{{ config.role_arn }}' +{% endfor %} + +Resources: + PromCrossAccountPolicy: + Type: AWS::IAM::ManagedPolicy + Properties: + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: + - sts:AssumeRole + Resource: !FindInMap [{{ addon_config.prefix }}EnvironmentConfigMap, !Ref Env, RoleArn] +Outputs: + AMPAccessPolicyArn: + Description: "Allow the task to assume the prometheus writer role in the central account" + Value: !Ref PromCrossAccountPolicy diff --git a/tests/dbt_platform_helper/templates/addons/svc/s3-policy.yml b/tests/dbt_platform_helper/templates/addons/svc/s3-policy.yml new file mode 100644 index 000000000..2248f2e31 --- /dev/null +++ b/tests/dbt_platform_helper/templates/addons/svc/s3-policy.yml @@ -0,0 +1,66 @@ +# {% extra_header %} +# {% version_info %} +Metadata: + cfn-lint: + config: + ignore_checks: + - W2001 # Parameter not used + +Parameters: + # Copilot required Parameters... + App: + Type: String + Description: Your application's name. + Env: + Type: String + Description: The environment name your service, job, or workflow is being deployed to. + Name: + Type: String + Description: The name of the service, job, or workflow being deployed. + +Mappings: + {{ addon_config.prefix }}EnvironmentConfigMap: + # Create an entry for each environment +{% for env_name, config in addon_config.environments.items() %} + {{ env_name }}: + BucketName: '{{ config.bucket_name }}' + KmsKeyArn: '{{ config.kms_key_arn }}' +{% endfor %} + +Resources: + {{ addon_config.prefix }}S3AccessPolicy: + Metadata: + 'aws:copilot:description': 'An IAM ManagedPolicy for your service to access the bucket' + Type: AWS::IAM::ManagedPolicy + Properties: + Description: Grants Read access to the S3 bucket. 
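+      # Note: when addon_config.readonly is false, the object statement below
+      # also grants write and delete via s3:*Object.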
+      PolicyDocument:
+        Version: 2012-10-17
+        Statement:
+          - Sid: KMSDecryptAndGenerate
+            Effect: Allow
+            Action:
+              - kms:Decrypt
+              - kms:GenerateDataKey
+            Resource: !FindInMap [{{ addon_config.prefix }}EnvironmentConfigMap, !Ref Env, KmsKeyArn]
+          - Sid: S3ObjectActions
+            Effect: Allow
+            Action:
+{%- if addon_config.readonly %}
+              - s3:GetObject
+{% else %}
+              - s3:*Object
+{% endif %}
+            Resource: !Sub
+              - "arn:aws:s3:::${bucket_name}/*"
+              - bucket_name: !FindInMap [{{ addon_config.prefix }}EnvironmentConfigMap, !Ref Env, BucketName]
+          - Sid: S3ListAction
+            Effect: Allow
+            Action: s3:ListBucket
+            Resource: !Sub
+              - "arn:aws:s3:::${bucket_name}"
+              - bucket_name: !FindInMap [{{ addon_config.prefix }}EnvironmentConfigMap, !Ref Env, BucketName]
+Outputs:
+  {{ addon_config.prefix }}AccessPolicy:
+    Description: "The IAM::ManagedPolicy to attach to the task role"
+    Value: !Ref {{ addon_config.prefix }}S3AccessPolicy
diff --git a/tests/dbt_platform_helper/templates/addons/svc/subscription-filter.yml b/tests/dbt_platform_helper/templates/addons/svc/subscription-filter.yml
new file mode 100644
index 000000000..403893115
--- /dev/null
+++ b/tests/dbt_platform_helper/templates/addons/svc/subscription-filter.yml
@@ -0,0 +1,26 @@
+# {% extra_header %}
+# {% version_info %}
+
+Parameters:
+  App:
+    Type: String
+    Description: Your application's name.
+  Env:
+    Type: String
+    Description: The environment name your service, job, or workflow is being deployed to.
+  Name:
+    Type: String
+    Description: The name of the service, job, or workflow being deployed.
+
+Conditions:
+  CreateProdSubFilter: !Or [!Equals [!Ref Env, prod], !Equals [!Ref Env, production], !Equals [!Ref Env, PROD], !Equals [!Ref Env, PRODUCTION]]
+
+Resources:
+  SubscriptionFilter:
+    Type: AWS::Logs::SubscriptionFilter
+    Properties:
+      RoleArn: !Sub "arn:aws:iam::${AWS::AccountId}:role/CWLtoSubscriptionFilterRole"
+      LogGroupName: !Sub "/copilot/${App}/${Env}/${Name}"
+      FilterName: !Sub "/copilot/${App}/${Env}/${Name}"
+      FilterPattern: ""
+      DestinationArn: !If [CreateProdSubFilter, '{{ log_destination.prod }}', '{{ log_destination.dev }}']
diff --git a/tests/dbt_platform_helper/templates/ci-codebuild-role-policy.json b/tests/dbt_platform_helper/templates/ci-codebuild-role-policy.json
new file mode 100644
index 000000000..af3286fe2
--- /dev/null
+++ b/tests/dbt_platform_helper/templates/ci-codebuild-role-policy.json
@@ -0,0 +1,73 @@
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Sid": "CloudWatchLogsPolicy",
+      "Effect": "Allow",
+      "Action": [
+        "logs:CreateLogGroup",
+        "logs:CreateLogStream",
+        "logs:PutLogEvents"
+      ],
+      "Resource": "*"
+    },
+    {
+      "Sid": "CodeCommitPolicy",
+      "Effect": "Allow",
+      "Action": [
+        "codecommit:GitPull"
+      ],
+      "Resource": "*"
+    },
+    {
+      "Sid": "S3GetObjectPolicy",
+      "Effect": "Allow",
+      "Action": [
+        "s3:GetObject",
+        "s3:GetObjectVersion"
+      ],
+      "Resource": "*"
+    },
+    {
+      "Sid": "S3PutObjectPolicy",
+      "Effect": "Allow",
+      "Action": [
+        "s3:PutObject"
+      ],
+      "Resource": "*"
+    },
+    {
+      "Sid": "S3BucketIdentity",
+      "Effect": "Allow",
+      "Action": [
+        "s3:GetBucketAcl",
+        "s3:GetBucketLocation"
+      ],
+      "Resource": "*"
+    },
+    {
+      "Sid": "AllowPushPull",
+      "Effect": "Allow",
+      "Action": [
+        "ecr:*",
+        "ecr-public:*",
+        "sts:GetServiceBearerToken"
+      ],
+      "Resource": "*"
+    },
+    {
+      "Sid": "GetAccountName",
+      "Effect": "Allow",
+      "Action": "iam:ListAccountAliases",
+      "Resource": "*"
+    },
+    {
+      "Sid": "GetParameters",
+      "Effect": "Allow",
+      "Action": [
+        "ssm:GetParameters"
+      ],
+      "Resource":
"arn:aws:ssm:eu-west-2:*:parameter/codebuild/*" + } + ] +} diff --git a/tests/dbt_platform_helper/templates/custom-codebuild-role-policy.json b/tests/dbt_platform_helper/templates/custom-codebuild-role-policy.json new file mode 100644 index 000000000..a7c34c4b0 --- /dev/null +++ b/tests/dbt_platform_helper/templates/custom-codebuild-role-policy.json @@ -0,0 +1,49 @@ +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "CloudWatchLogsPolicy", + "Effect": "Allow", + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + "Resource": "*" + }, + { + "Sid": "CodeCommitPolicy", + "Effect": "Allow", + "Action": [ + "codecommit:GitPull" + ], + "Resource": "*" + }, + { + "Sid": "S3GetObjectPolicy", + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:GetObjectVersion" + ], + "Resource": "*" + }, + { + "Sid": "S3PutObjectPolicy", + "Effect": "Allow", + "Action": [ + "s3:PutObject" + ], + "Resource": "*" + }, + { + "Sid": "S3BucketIdentity", + "Effect": "Allow", + "Action": [ + "s3:GetBucketAcl", + "s3:GetBucketLocation" + ], + "Resource": "*" + } + ] +} diff --git a/tests/dbt_platform_helper/templates/environments/main.tf b/tests/dbt_platform_helper/templates/environments/main.tf new file mode 100644 index 000000000..8f348a6b9 --- /dev/null +++ b/tests/dbt_platform_helper/templates/environments/main.tf @@ -0,0 +1,47 @@ +# {% extra_header %} +# {% version_info %} +locals { + config = yamldecode(file("../../../platform-config.yml")) + environments = local.config["environments"] + env_config = { for name, config in local.environments : name => merge(lookup(local.environments, "*", {}), config) } + args = { + application = "{{ application }}" + services = local.config["extensions"] + dns_account_id = local.env_config["{{ environment }}"]["accounts"]["dns"]["id"] + } +} + +terraform { + required_version = "~> 1.8" + backend "s3" { + bucket = "terraform-platform-state-{{ config.accounts.deploy.name }}" + key = "tfstate/application/{{ application }}-{{ environment }}.tfstate" + region = "eu-west-2" + encrypt = true + kms_key_id = "alias/terraform-platform-state-s3-key-{{ config.accounts.deploy.name }}" + dynamodb_table = "terraform-platform-lockdb-{{ config.accounts.deploy.name }}" + } + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 5" + } + } +} + +module "extensions" { + source = "git::https://github.com/uktrade/terraform-platform-modules.git//extensions?depth=1&ref={{terraform_platform_modules_version}}" + + args = local.args + environment = "{{ environment }}" + vpc_name = "{{ config.vpc }}" +} + +/* +Clean up because terraform modules were initially deployed with a -tf suffix. This block moves those modules to naming without a suffix. +Can be removed once all services have moved to the new naming. 
+*/ +moved { + from = module.extensions-tf + to = module.extensions +} diff --git a/tests/dbt_platform_helper/templates/pipelines/codebase/overrides/bin/override.ts b/tests/dbt_platform_helper/templates/pipelines/codebase/overrides/bin/override.ts new file mode 100644 index 000000000..c81e80393 --- /dev/null +++ b/tests/dbt_platform_helper/templates/pipelines/codebase/overrides/bin/override.ts @@ -0,0 +1,8 @@ +#!/usr/bin/env node +import * as cdk from 'aws-cdk-lib'; +import { TransformedStack } from '../stack'; + +const app = new cdk.App(); +new TransformedStack(app, 'Stack', { + appName: process.env.COPILOT_APPLICATION_NAME || "", +}); diff --git a/tests/dbt_platform_helper/templates/pipelines/codebase/overrides/buildspec.deploy.yml b/tests/dbt_platform_helper/templates/pipelines/codebase/overrides/buildspec.deploy.yml new file mode 100644 index 000000000..11a4b9158 --- /dev/null +++ b/tests/dbt_platform_helper/templates/pipelines/codebase/overrides/buildspec.deploy.yml @@ -0,0 +1,29 @@ +version: 0.2 +env: + git-credential-helper: yes + parameter-store: + SLACK_CHANNEL_ID: /codebuild/slack_oauth_channel + SLACK_TOKEN: /codebuild/slack_oauth_token + variables: + COLOR: false + CI: true +phases: + build: + commands: + - echo "Copilot environment is ${COPILOT_ENVIRONMENT}" + - /work/cli deploy --send-notifications + + post_build: + commands: + - | + if [ "${CODEBUILD_BUILD_SUCCEEDING}" != "1" ]; then + BUILD_ID_PREFIX=$(echo $CODEBUILD_BUILD_ID | cut -d':' -f1) + echo "BUILD_ID_PREFIX - ${BUILD_ID_PREFIX}" + + echo -e "\nInstalling dependencies" + pip install dbt-platform-helper + + MESSAGE=":no_entry::construction: Build failure in codebuild project: <https://eu-west-2.console.aws.amazon.com/codesuite/codebuild/${AWS_ACCOUNT_ID}/projects/${BUILD_ID_PREFIX}/build/${CODEBUILD_BUILD_ID}/?region=eu-west-2|${BUILD_ID_PREFIX} - build ${CODEBUILD_BUILD_NUMBER}>" + + platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "" "${MESSAGE}" + fi diff --git a/tests/dbt_platform_helper/templates/pipelines/environments/buildspec.yml b/tests/dbt_platform_helper/templates/pipelines/environments/buildspec.yml new file mode 100644 index 000000000..c3ea49d6c --- /dev/null +++ b/tests/dbt_platform_helper/templates/pipelines/environments/buildspec.yml @@ -0,0 +1,80 @@ +# {% extra_header %} +# {% version_info %} +# Buildspec runs in the build stage of your environment pipeline to generate the environment CloudFormation stack config. +version: 0.2 +env: + variables: + DYFF_VERSION: 1.5.8 +phases: + install: + commands: + - cd $CODEBUILD_SRC_DIR + - | + if [ ! -f .copilot-version ]; then + echo "Cannot find .copilot-version file" + exit 1 + fi + - COPILOT_VERSION=$(cat .copilot-version) + # Install pyyaml and dbt-platform-helper + - pip install PyYAML dbt-platform-helper + # Reinstall if we require a different version to the latest + - CURRENT_PLATFORM_HELPER_VERSION=$(platform-helper --version) + - export PLATFORM_HELPER_VERSION=$(platform-helper version get-platform-helper-for-project) + - | + if [ ! 
"${PLATFORM_HELPER_VERSION}" == "${CURRENT_PLATFORM_HELPER_VERSION}" ] + then + pip uninstall dbt-platform-helper + pip install dbt-platform-helper==$PLATFORM_HELPER_VERSION + fi + - mkdir ./build-tools + - cd ./build-tools + # Install copilot + - wget -q "https://ecs-cli-v2-release.s3.amazonaws.com/copilot-linux-v${COPILOT_VERSION}" + - mv "./copilot-linux-v${COPILOT_VERSION}" ./copilot + - chmod +x ./copilot + # Install dyff - yaml differ + - wget -q "https://github.com/homeport/dyff/releases/download/v${DYFF_VERSION}/dyff_${DYFF_VERSION}_linux_amd64.tar.gz" + - tar -zxvf "dyff_${DYFF_VERSION}_linux_amd64.tar.gz" + - chmod +x ./dyff + build: + commands: + - cd $CODEBUILD_SRC_DIR + - cp -r copilot/ current-copilot/ + - platform-helper copilot make-addons + - > + for FILE in $(ls copilot/**/addons/*.yml); do + ./build-tools/dyff between --omit-header $FILE current-$FILE >> ./build-tools/file-differences + done; + - | + if [[ "$(cat ./build-tools/file-differences)" = *[![:space:]]* ]]; then + echo 'Changes are introduced with version ${PLATFORM_HELPER_VERSION} of platform-helper:' + echo + for FILE in $(ls copilot/**/addons/*.yml); do + echo "Changes in $FILE:" + ./build-tools/dyff between --omit-header $FILE current-$FILE + done; + echo + echo 'Ensure you are running version ${PLATFORM_HELPER_VERSION} with pip install dbt-platform-helper==${PLATFORM_HELPER_VERSION}' + echo 'And run platform-helper copilot make-addons to regenerate your addons templates' + exit 1 + fi + post_build: + commands: + - git checkout -- . + - export COLOR="false" + - export CI="true" + - pipeline=$(cat $CODEBUILD_SRC_DIR/copilot/pipelines/environments/manifest.yml | python -c 'import sys, json, yaml; print(json.dumps(yaml.safe_load(sys.stdin.read())))') + - stages=$(echo $pipeline | jq -r '.stages[].name') + # Generate the cloudformation templates. + - > + for env in $stages; do + ./build-tools/copilot env package -n $env --output-dir './infrastructure' --upload-assets --force; + if [ $? -ne 0 ]; then + echo "Cloudformation stack and config files were not generated. Please check build logs to see if there was a manifest validation error." 1>&2; + exit 1; + fi + done; + - ls -lah ./infrastructure +artifacts: + files: + - "infrastructure/*" diff --git a/tests/dbt_platform_helper/templates/pipelines/environments/manifest.yml b/tests/dbt_platform_helper/templates/pipelines/environments/manifest.yml new file mode 100644 index 000000000..f4bcc0161 --- /dev/null +++ b/tests/dbt_platform_helper/templates/pipelines/environments/manifest.yml @@ -0,0 +1,49 @@ +# {% extra_header %} +# {% version_info %} +# This YAML file defines your pipeline: the source repository it tracks and the order of the environments to deploy to. +# For more info: https://aws.github.io/copilot-cli/docs/manifest/pipeline/ + +# The name of the pipeline. +name: environments + +# The version of the schema used in this template. +version: 1 + +# This section defines your source, changes to which trigger your pipeline. +source: + # The name of the provider that is used to store the source artifacts. + # (i.e. GitHub, Bitbucket, CodeCommit) + provider: GitHub + # Additional properties that further specify the location of the artifacts. 
+ properties: + # Todo: Allow for overriding this, but without risking deploying a branch to higher environments + branch: main + repository: https://github.com/{{ git_repo }} + connection_name: {{ app_name }} + +build: + additional_policy: + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - iam:ListAccountAliases + Resource: '*' + +# This section defines the order of the environments your pipeline will deploy to. +stages: +{% for name, environment in pipeline_environments.items() -%} + - # The name of the environment. + name: {{ name }} + deployments: + deploy-env: + template_path: infrastructure/{{ name }}.env.yml + template_config: infrastructure/{{ name }}.env.params.json + stack_name: {{ app_name }}-{{ name }} + # Optional: flag for manual approval action before deployment. + {% if not environment.requires_approval %}# {% endif %}requires_approval: true + # Optional: use test commands to validate this stage of your build. + # test_commands: [echo 'running tests', make test] + +{% endfor %} diff --git a/tests/dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml b/tests/dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml new file mode 100644 index 000000000..9b7539c34 --- /dev/null +++ b/tests/dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml @@ -0,0 +1,21 @@ +# {% extra_header %} +# {% version_info %} +# Give the source stage a more meaningful name +- op: replace + path: /Resources/Pipeline/Properties/Stages/0/Actions/0/Name + value: DeployCodebase + +# Add git metadata to the source output artefact +- op: add + path: /Resources/Pipeline/Properties/Stages/0/Actions/0/Configuration/OutputArtifactFormat + value: CODEBUILD_CLONE_REF + +# Add codestar permissions to codebuild role +- op: add + path: /Resources/BuildProjectPolicy/Properties/PolicyDocument/Statement/- + value: + Effect: Allow + Action: + - codestar-connections:UseConnection + Resource: + - {{ codestar_connection_arn }} diff --git a/tests/dbt_platform_helper/utils/__init__.py b/tests/dbt_platform_helper/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/dbt_platform_helper/utils/application.py b/tests/dbt_platform_helper/utils/application.py new file mode 100644 index 000000000..420689df5 --- /dev/null +++ b/tests/dbt_platform_helper/utils/application.py @@ -0,0 +1,137 @@ +import json +import re +from pathlib import Path +from typing import Dict + +import boto3 +import yaml +from boto3 import Session +from yaml.parser import ParserError + +from dbt_platform_helper.exceptions import ApplicationNotFoundError +from dbt_platform_helper.utils.aws import get_aws_session_or_abort +from dbt_platform_helper.utils.aws import get_profile_name_from_account_id +from dbt_platform_helper.utils.aws import get_ssm_secrets +from dbt_platform_helper.utils.messages import abort_with_error + + +class Environment: + name: str + account_id: str + sessions: Dict[str, boto3.Session] + + def __init__(self, name: str, account_id: str, sessions: Dict[str, boto3.Session]): + self.name = name + self.account_id = account_id + self.sessions = sessions + + @property + def session(self): + if self.account_id not in self.sessions: + self.sessions[self.account_id] = get_aws_session_or_abort( + get_profile_name_from_account_id(self.account_id), + ) + + return self.sessions[self.account_id] + + +class Service: + name: str + kind: str + + def __init__(self, name: str, kind: str): + self.name = name + self.kind = 
kind + + +class Application: + name: str + environments: Dict[str, Environment] + services: Dict[str, Service] + + def __init__(self, name: str): + self.name = name + self.environments = {} + self.services = {} + + def __str__(self): + output = f"Application {self.name} with" + + environments = [f"{env.name}:{env.account_id}" for env in self.environments.values()] + + if environments: + return f"{output} environments {', '.join(environments)}" + + return f"{output} no environments" + + def __eq__(self, other): + return str(self) == str(other) + + +def load_application(app: str = None, default_session: Session = None) -> Application: + application = Application(app if app else get_application_name()) + current_session = default_session if default_session else get_aws_session_or_abort() + + ssm_client = current_session.client("ssm") + + try: + ssm_client.get_parameter( + Name=f"/copilot/applications/{application.name}", + WithDecryption=False, + ) + except ssm_client.exceptions.ParameterNotFound: + raise ApplicationNotFoundError + + path = f"/copilot/applications/{application.name}/environments" + secrets = get_ssm_secrets(app, None, current_session, path) + + sts_client = current_session.client("sts") + account_id = sts_client.get_caller_identity()["Account"] + sessions = {account_id: current_session} + + def is_environment_key(name): + """ + Match only parameter names that are an environment path with no further + nesting. + + e.g. + - /copilot/applications/test/environments/my_env will match. + - /copilot/applications/test/environments/my_env/addons will not match. + """ + environment_key_regex = r"^/copilot/applications/{}/environments/[^/]*$".format( + application.name + ) + return bool(re.match(environment_key_regex, name)) + + environments = { + env["name"]: Environment(env["name"], env["accountID"], sessions) + for env in [json.loads(s[1]) for s in secrets if is_environment_key(s[0])] + } + application.environments = environments + + response = ssm_client.get_parameters_by_path( + Path=f"/copilot/applications/{application.name}/components", + Recursive=False, + WithDecryption=False, + ) + + application.services = { + svc["name"]: Service(svc["name"], svc["type"]) + for svc in [json.loads(parameter["Value"]) for parameter in response["Parameters"]] + } + + return application + + +def get_application_name(): + app_name = None + try: + app_config = yaml.safe_load(Path("copilot/.workspace").read_text()) + app_name = app_config["application"] + except (FileNotFoundError, ParserError): + pass + + if app_name is None: + abort_with_error("Cannot get application name. 
No copilot/.workspace file found") + + return app_name diff --git a/tests/dbt_platform_helper/utils/arn_parser.py b/tests/dbt_platform_helper/utils/arn_parser.py new file mode 100644 index 000000000..3891f1294 --- /dev/null +++ b/tests/dbt_platform_helper/utils/arn_parser.py @@ -0,0 +1,59 @@ +from dbt_platform_helper.exceptions import ValidationException + + +class ARN: + def __init__(self, arn, *args, **kwargs): + # store the original ARN + self.__source = arn + + arn_parts = arn.split(":", 7) + + if len(arn_parts) != 7: + raise ValidationException(f"Invalid ARN: {arn}") + + # parse and store ARN parts + # arn:partition:service:region:account-id:resource-type:resource-id + ( + _, + partition, + service, + region, + account, + project, + build_id, + ) = arn_parts + + self.__partition = partition + self.__service = service + self.__region = region + self.__account_id = account + self.__project = project + self.__build_id = build_id + + @property + def source(self): + return self.__source + + @property + def partition(self): + return self.__partition + + @property + def service(self): + return self.__service + + @property + def region(self): + return self.__region + + @property + def account_id(self): + return self.__account_id + + @property + def project(self): + return self.__project + + @property + def build_id(self): + return self.__build_id diff --git a/tests/dbt_platform_helper/utils/aws.py b/tests/dbt_platform_helper/utils/aws.py new file mode 100644 index 000000000..0ac6399af --- /dev/null +++ b/tests/dbt_platform_helper/utils/aws.py @@ -0,0 +1,554 @@ +import json +import os +import urllib.parse +from configparser import ConfigParser +from pathlib import Path +from typing import Tuple + +import boto3 +import botocore +import botocore.exceptions +import click +import yaml +from boto3 import Session + +from dbt_platform_helper.exceptions import AWSException +from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError +from dbt_platform_helper.exceptions import ImageNotFoundError +from dbt_platform_helper.exceptions import ValidationException +from dbt_platform_helper.utils.files import cache_refresh_required +from dbt_platform_helper.utils.files import read_supported_versions_from_cache +from dbt_platform_helper.utils.files import write_to_cache + +SSM_BASE_PATH = "/copilot/{app}/{env}/secrets/" +SSM_PATH = "/copilot/{app}/{env}/secrets/{name}" +AWS_SESSION_CACHE = {} + + +def get_aws_session_or_abort(aws_profile: str = None) -> boto3.session.Session: + REFRESH_TOKEN_MESSAGE = ( + "To refresh this SSO session run `aws sso login` with the corresponding profile" + ) + aws_profile = aws_profile or os.getenv("AWS_PROFILE") + if aws_profile in AWS_SESSION_CACHE: + return AWS_SESSION_CACHE[aws_profile] + + click.secho(f'Checking AWS connection for profile "{aws_profile}"...', fg="cyan") + + try: + session = boto3.session.Session(profile_name=aws_profile) + sts = session.client("sts") + account_id, user_id = get_account_details(sts) + click.secho("Credentials are valid.", fg="green") + + except botocore.exceptions.ProfileNotFound: + _handle_error(f'AWS profile "{aws_profile}" is not configured.') + except botocore.exceptions.ClientError as e: + if e.response["Error"]["Code"] == "ExpiredToken": + _handle_error( + f"Credentials are NOT valid. 
\nPlease login with: aws sso login --profile {aws_profile}" + ) + except botocore.exceptions.NoCredentialsError: + _handle_error("There are no credentials set for this session.", REFRESH_TOKEN_MESSAGE) + except botocore.exceptions.UnauthorizedSSOTokenError: + _handle_error("The SSO Token used for this session is unauthorised.", REFRESH_TOKEN_MESSAGE) + except botocore.exceptions.TokenRetrievalError: + _handle_error("Unable to retrieve the Token for this session.", REFRESH_TOKEN_MESSAGE) + except botocore.exceptions.SSOTokenLoadError: + _handle_error( + "The SSO session associated with this profile has expired, is not set or is otherwise invalid.", + REFRESH_TOKEN_MESSAGE, + ) + + alias_client = session.client("iam") + account_name = alias_client.list_account_aliases().get("AccountAliases", []) + + _log_account_info(account_name, account_id) + + click.echo( + click.style("User: ", fg="yellow") + + click.style(f"{user_id.split(':')[-1]}\n", fg="white", bold=True) + ) + + AWS_SESSION_CACHE[aws_profile] = session + return session + + +def _handle_error(message: str, refresh_token_message: str = None) -> None: + full_message = message + (" " + refresh_token_message if refresh_token_message else "") + click.secho(full_message, fg="red") + exit(1) + + +def _log_account_info(account_name: list, account_id: str) -> None: + if account_name: + click.echo( + click.style("Logged in with AWS account: ", fg="yellow") + + click.style(f"{account_name[0]}/{account_id}", fg="white", bold=True) + ) + else: + click.echo( + click.style("Logged in with AWS account id: ", fg="yellow") + + click.style(f"{account_id}", fg="white", bold=True) + ) + + +class NoProfileForAccountIdError(Exception): + def __init__(self, account_id): + super().__init__(f"No profile found for account {account_id}") + + +def get_profile_name_from_account_id(account_id: str): + aws_config = ConfigParser() + aws_config.read(Path.home().joinpath(".aws/config")) + for section in aws_config.sections(): + found_account_id = aws_config[section].get( + "sso_account_id", aws_config[section].get("profile_account_id", None) + ) + if account_id == found_account_id: + return section.removeprefix("profile ") + + raise NoProfileForAccountIdError(account_id) + + +def get_ssm_secret_names(app, env): + session = get_aws_session_or_abort() + client = session.client("ssm") + + path = SSM_BASE_PATH.format(app=app, env=env) + + params = dict( + Path=path, + Recursive=False, + WithDecryption=True, + MaxResults=10, + ) + + secret_names = [] + + while True: + response = client.get_parameters_by_path(**params) + + for secret in response["Parameters"]: + secret_names.append(secret["Name"]) + + if "NextToken" in response: + params["NextToken"] = response["NextToken"] + else: + break + + return sorted(secret_names) + + +def get_ssm_secrets(app, env, session=None, path=None): + """Return secrets from AWS Parameter Store as a list of tuples with the + secret name and secret value.""" + + if not session: + session = get_aws_session_or_abort() + client = session.client("ssm") + + if not path: + path = SSM_BASE_PATH.format(app=app, env=env) + + params = dict( + Path=path, + Recursive=False, + WithDecryption=True, + MaxResults=10, + ) + + secrets = [] + + while True: + response = client.get_parameters_by_path(**params) + + for secret in response["Parameters"]: + secrets.append((secret["Name"], secret["Value"])) + + if "NextToken" in response: + params["NextToken"] = response["NextToken"] + else: + break + + return sorted(secrets) + + +def set_ssm_param( + app, env, 
param_name, param_value, overwrite, exists, description="Copied from Cloud Foundry." +): + session = get_aws_session_or_abort() + client = session.client("ssm") + + parameter_args = dict( + Name=param_name, + Description=description, + Value=param_value, + Type="SecureString", + Overwrite=overwrite, + Tags=[ + {"Key": "copilot-application", "Value": app}, + {"Key": "copilot-environment", "Value": env}, + ], + Tier="Intelligent-Tiering", + ) + + if overwrite and not exists: + raise ValidationException( + """Arguments "overwrite" is set to True, but "exists" is set to False.""" + ) + + if overwrite and exists: + # Tags can't be updated when overwriting + del parameter_args["Tags"] + + client.put_parameter(**parameter_args) + + +def check_response(response): + if response["ResponseMetadata"]["HTTPStatusCode"] != 200: + click.secho( + f"Unknown response error from AWS.\nStatus Code: {response['ResponseMetadata']['HTTPStatusCode']}", + fg="red", + ) + exit() + + +def get_codestar_connection_arn(app_name): + session = get_aws_session_or_abort() + response = session.client("codestar-connections").list_connections() + + for connection in response["Connections"]: + if connection["ConnectionName"] == app_name: + return connection["ConnectionArn"] + + +def get_account_details(sts_client=None): + if not sts_client: + sts_client = get_aws_session_or_abort().client("sts") + response = sts_client.get_caller_identity() + + return response["Account"], response["UserId"] + + +def get_public_repository_arn(repository_uri): + session = get_aws_session_or_abort() + response = session.client("ecr-public", region_name="us-east-1").describe_repositories() + repository = [ + repo for repo in response["repositories"] if repo["repositoryUri"] == repository_uri + ] + + return repository[0]["repositoryArn"] if repository else None + + +def get_load_balancer_domain_and_configuration( + project_session: Session, app: str, env: str, svc: str +) -> Tuple[str, dict]: + response = get_load_balancer_configuration(project_session, app, env, svc) + + # Find the domain name + with open(f"./copilot/{svc}/manifest.yml", "r") as fd: + conf = yaml.safe_load(fd) + if "environments" in conf: + if env in conf["environments"]: + for domain in conf["environments"].items(): + if domain[0] == env: + if ( + domain[1] is None + or domain[1]["http"] is None + or domain[1]["http"]["alias"] is None + ): + click.secho( + f"No domains found, please check the ./copilot/{svc}/manifest.yml file", + fg="red", + ) + exit() + domain_name = domain[1]["http"]["alias"] + else: + click.secho( + f"Environment {env} not found, please check the ./copilot/{svc}/manifest.yml file", + fg="red", + ) + exit() + + return domain_name, response["LoadBalancers"][0] + + +def get_load_balancer_configuration( + project_session: Session, app: str, env: str, svc: str +) -> list[Session]: + proj_client = project_session.client("ecs") + + response = proj_client.list_clusters() + check_response(response) + no_items = True + for cluster_arn in response["clusterArns"]: + cluster_name = cluster_arn.split("/")[1] + if cluster_name.startswith(f"{app}-{env}-Cluster"): + no_items = False + break + + if no_items: + click.echo( + click.style("There are no clusters for environment ", fg="red") + + click.style(f"{env} ", fg="white", bold=True) + + click.style("of application ", fg="red") + + click.style(f"{app} ", fg="white", bold=True) + + click.style("in AWS account ", fg="red") + + click.style(f"{project_session.profile_name}", fg="white", bold=True), + ) + exit() + + response = 
proj_client.list_services(cluster=cluster_name) + check_response(response) + no_items = True + for service_arn in response["serviceArns"]: + fully_qualified_service_name = service_arn.split("/")[2] + if fully_qualified_service_name.startswith(f"{app}-{env}-{svc}-Service"): + no_items = False + break + + if no_items: + click.echo( + click.style("There are no services called ", fg="red") + + click.style(f"{svc} ", fg="white", bold=True) + + click.style("for environment ", fg="red") + + click.style(f"{env} ", fg="white", bold=True) + + click.style("of application ", fg="red") + + click.style(f"{app} ", fg="white", bold=True) + + click.style("in AWS account ", fg="red") + + click.style(f"{project_session.profile_name}", fg="white", bold=True), + ) + exit() + + elb_client = project_session.client("elbv2") + + elb_arn = elb_client.describe_target_groups( + TargetGroupArns=[ + proj_client.describe_services( + cluster=cluster_name, + services=[ + fully_qualified_service_name, + ], + )["services"][0]["loadBalancers"][0]["targetGroupArn"], + ], + )["TargetGroups"][0]["LoadBalancerArns"][0] + + response = elb_client.describe_load_balancers(LoadBalancerArns=[elb_arn]) + check_response(response) + return response + + +def get_postgres_connection_data_updated_with_master_secret(session, parameter_name, secret_arn): + ssm_client = session.client("ssm") + secrets_manager_client = session.client("secretsmanager") + response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True) + parameter_value = response["Parameter"]["Value"] + + parameter_data = json.loads(parameter_value) + + secret_response = secrets_manager_client.get_secret_value(SecretId=secret_arn) + secret_value = json.loads(secret_response["SecretString"]) + + parameter_data["username"] = urllib.parse.quote(secret_value["username"]) + parameter_data["password"] = urllib.parse.quote(secret_value["password"]) + + return parameter_data + + +def get_supported_redis_versions(): + + if cache_refresh_required("redis"): + + supported_versions = [] + + session = get_aws_session_or_abort() + elasticache_client = session.client("elasticache") + + supported_versions_response = elasticache_client.describe_cache_engine_versions( + Engine="redis" + ) + + supported_versions = [ + version["EngineVersion"] + for version in supported_versions_response["CacheEngineVersions"] + ] + + write_to_cache("redis", supported_versions) + + return supported_versions + + else: + return read_supported_versions_from_cache("redis") + + +def get_supported_opensearch_versions(): + + if cache_refresh_required("opensearch"): + + supported_versions = [] + + session = get_aws_session_or_abort() + opensearch_client = session.client("opensearch") + + response = opensearch_client.list_versions() + all_versions = response["Versions"] + + opensearch_versions = [ + version for version in all_versions if not version.startswith("Elasticsearch_") + ] + supported_versions = [ + version.removeprefix("OpenSearch_") for version in opensearch_versions + ] + + write_to_cache("opensearch", supported_versions) + + return supported_versions + + else: + return read_supported_versions_from_cache("opensearch") + + +def get_connection_string( + session: Session, + app: str, + env: str, + db_identifier: str, + connection_data_fn=get_postgres_connection_data_updated_with_master_secret, +) -> str: + addon_name = db_identifier.split(f"{app}-{env}-", 1)[1] + normalised_addon_name = addon_name.replace("-", "_").upper() + connection_string_parameter = ( + 
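+        # Illustrative walk-through (hypothetical values): app="myapp", env="dev"
+        # and db_identifier="myapp-dev-my-db" give addon_name "my-db", normalised
+        # to "MY_DB", so the parameter read below would be
+        # /copilot/myapp/dev/secrets/MY_DB_READ_ONLY_USER.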
f"/copilot/{app}/{env}/secrets/{normalised_addon_name}_READ_ONLY_USER" + ) + master_secret_name = f"/copilot/{app}/{env}/secrets/{normalised_addon_name}_RDS_MASTER_ARN" + master_secret_arn = session.client("ssm").get_parameter( + Name=master_secret_name, WithDecryption=True + )["Parameter"]["Value"] + + conn = connection_data_fn(session, connection_string_parameter, master_secret_arn) + + return f"postgres://{conn['username']}:{conn['password']}@{conn['host']}:{conn['port']}/{conn['dbname']}" + + +class Vpc: + def __init__(self, subnets: list[str], security_groups: list[str]): + self.subnets = subnets + self.security_groups = security_groups + + +def get_vpc_info_by_name(session: Session, app: str, env: str, vpc_name: str) -> Vpc: + ec2_client = session.client("ec2") + vpc_response = ec2_client.describe_vpcs(Filters=[{"Name": "tag:Name", "Values": [vpc_name]}]) + + matching_vpcs = vpc_response.get("Vpcs", []) + + if not matching_vpcs: + raise AWSException(f"VPC not found for name '{vpc_name}'") + + vpc_id = vpc_response["Vpcs"][0].get("VpcId") + + if not vpc_id: + raise AWSException(f"VPC id not present in vpc '{vpc_name}'") + + ec2_resource = session.resource("ec2") + vpc = ec2_resource.Vpc(vpc_id) + + route_tables = ec2_client.describe_route_tables( + Filters=[{"Name": "vpc-id", "Values": [vpc_id]}] + )["RouteTables"] + + subnets = [] + for route_table in route_tables: + private_routes = [route for route in route_table["Routes"] if "NatGatewayId" in route] + if not private_routes: + continue + for association in route_table["Associations"]: + if "SubnetId" in association: + subnet_id = association["SubnetId"] + subnets.append(subnet_id) + + if not subnets: + raise AWSException(f"No private subnets found in vpc '{vpc_name}'") + + tag_value = {"Key": "Name", "Value": f"copilot-{app}-{env}-env"} + sec_groups = [sg.id for sg in vpc.security_groups.all() if sg.tags and tag_value in sg.tags] + + if not sec_groups: + raise AWSException(f"No matching security groups found in vpc '{vpc_name}'") + + return Vpc(subnets, sec_groups) + + +def start_build_extraction(codebuild_client, build_options): + response = codebuild_client.start_build(**build_options) + return response["build"]["arn"] + + +def check_codebase_exists(session: Session, application, codebase: str): + try: + ssm_client = session.client("ssm") + ssm_client.get_parameter( + Name=f"/copilot/applications/{application.name}/codebases/{codebase}" + )["Parameter"]["Value"] + except ( + KeyError, + ValueError, + ssm_client.exceptions.ParameterNotFound, + ): + raise CopilotCodebaseNotFoundError + + +def check_image_exists(session, application, codebase, commit): + ecr_client = session.client("ecr") + try: + ecr_client.describe_images( + repositoryName=f"{application.name}/{codebase}", + imageIds=[{"imageTag": f"commit-{commit}"}], + ) + except ( + ecr_client.exceptions.RepositoryNotFoundException, + ecr_client.exceptions.ImageNotFoundException, + ): + raise ImageNotFoundError + + +def get_build_url_from_arn(build_arn: str) -> str: + _, _, _, region, account_id, project_name, build_id = build_arn.split(":") + project_name = project_name.removeprefix("build/") + return ( + f"https://eu-west-2.console.aws.amazon.com/codesuite/codebuild/{account_id}/projects/" + f"{project_name}/build/{project_name}%3A{build_id}" + ) + + +def list_latest_images(ecr_client, ecr_repository_name, codebase_repository, echo_fn): + paginator = ecr_client.get_paginator("describe_images") + describe_images_response_iterator = paginator.paginate( + 
repositoryName=ecr_repository_name, + filter={"tagStatus": "TAGGED"}, + ) + images = [] + for page in describe_images_response_iterator: + images += page["imageDetails"] + + sorted_images = sorted( + images, + key=lambda i: i["imagePushedAt"], + reverse=True, + ) + + MAX_RESULTS = 20 + + for image in sorted_images[:MAX_RESULTS]: + try: + commit_tag = next(t for t in image["imageTags"] if t.startswith("commit-")) + if not commit_tag: + continue + + commit_hash = commit_tag.replace("commit-", "") + echo_fn( + f" - https://github.com/{codebase_repository}/commit/{commit_hash} - published: {image['imagePushedAt']}" + ) + except StopIteration: + continue diff --git a/tests/dbt_platform_helper/utils/click.py b/tests/dbt_platform_helper/utils/click.py new file mode 100644 index 000000000..4dd22beaa --- /dev/null +++ b/tests/dbt_platform_helper/utils/click.py @@ -0,0 +1,74 @@ +from click import Argument +from click import Choice +from click import Command +from click import Context +from click import Group +from click import HelpFormatter + + +class ClickDocOptCommand(Command): + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + format_click_usage(ctx, formatter) + + +class ClickDocOptGroup(Group): + command_class = ClickDocOptCommand + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + format_click_usage(ctx, formatter, True) + + +def format_click_usage(ctx: Context, formatter: HelpFormatter, group: bool = False) -> None: + help_text = f"Usage: {ctx.command_path} " + current_line = 0 + indent = len(help_text) + + parameters = list(ctx.command.params) + parameters.sort(key=lambda p: p.required, reverse=True) + parameters.sort(key=lambda p: hasattr(p, "is_flag") and p.is_flag) + parameters.sort(key=lambda p: p.__class__.__name__ == "Option") + + if group: + command_list = list(ctx.command.commands.keys()) + + if len(command_list) == 1: + help_text += f"{command_list[0]} " + elif len(command_list) <= 4: + parameters.insert(0, Argument(["command"], type=Choice(command_list))) + else: + parameters.insert(0, Argument(["command"])) + + for index, param in enumerate(parameters): + if param.__class__.__name__ == "Argument": + if hasattr(param.type, "choices"): + wrap = "(%s) " if param.required else "[(%s)] " + help_text += wrap % "|".join(param.type.choices) + else: + wrap = "<%s> " if param.required else "[<%s>] " + help_text += wrap % param.name + elif param.__class__.__name__ == "Option": + if ( + parameters[index - 1].__class__.__name__ == "Argument" + and not help_text.split("\n")[current_line].isspace() + and len(help_text.split("\n")[current_line]) > 40 + ): + help_text += "\n" + (" " * indent) + current_line += 1 + if getattr(param, "is_flag", False): + wrap = "%s " if param.required else "[%s] " + options = param.opts + if getattr(param, "default", None) is None: + options += param.secondary_opts + help_text += wrap % "|".join(options) + elif hasattr(param.type, "choices"): + wrap = "%s (%s) " if param.required else "[%s (%s)] " + help_text += wrap % (param.opts[0], "|".join(param.type.choices)) + else: + wrap = "%s <%s> " if param.required else "[%s <%s>] " + help_text += wrap % (param.opts[0], param.name) + + if index + 1 != len(parameters) and len(help_text.split("\n")[current_line]) > 70: + help_text += "\n" + (" " * indent) + current_line += 1 + + formatter.write(f"{help_text}\n") diff --git a/tests/dbt_platform_helper/utils/cloudfoundry.py b/tests/dbt_platform_helper/utils/cloudfoundry.py new file mode 100644 index 000000000..1a87d6cf9 --- 
/dev/null +++ b/tests/dbt_platform_helper/utils/cloudfoundry.py @@ -0,0 +1,14 @@ +import click +from cloudfoundry_client.client import CloudFoundryClient + + +def get_cloud_foundry_client_or_abort(): + try: + client = CloudFoundryClient.build_from_cf_config() + click.secho("Logged in to Cloud Foundry", fg="green") + return client + except Exception as ex: + click.secho("Could not connect to Cloud Foundry: ", fg="red", nl=False) + click.secho(str(ex)) + click.secho("Please log in with: cf login", fg="yellow") + exit(1) diff --git a/tests/dbt_platform_helper/utils/files.py b/tests/dbt_platform_helper/utils/files.py new file mode 100644 index 000000000..553f1c3ea --- /dev/null +++ b/tests/dbt_platform_helper/utils/files.py @@ -0,0 +1,174 @@ +import os +from copy import deepcopy +from datetime import datetime +from os import makedirs +from pathlib import Path + +import click +import yaml +from jinja2 import Environment +from jinja2 import FileSystemLoader + +from dbt_platform_helper.constants import PLATFORM_HELPER_CACHE_FILE + + +def to_yaml(value): + return yaml.dump(value, sort_keys=False) + + +def mkfile(base_path, file_path, contents, overwrite=False): + file_path = Path(file_path) + file = Path(base_path).joinpath(file_path) + file_exists = file.exists() + + if not file_path.parent.exists(): + makedirs(file_path.parent) + + if file_exists and not overwrite: + return f"File {file_path} exists; doing nothing" + + action = "overwritten" if file_exists and overwrite else "created" + + file.write_text(contents) + + return f"File {file_path} {action}" + + +def generate_override_files(base_path, file_path, output_dir): + def generate_files_for_dir(pattern): + for file in file_path.glob(pattern): + if file.is_file(): + contents = file.read_text() + file_name = str(file).removeprefix(f"{file_path}/") + click.echo( + mkfile( + base_path, + output_dir / file_name, + contents, + overwrite=True, + ) + ) + + generate_files_for_dir("*") + generate_files_for_dir("bin/*") + + +def generate_override_files_from_template(base_path, overrides_path, output_dir, template_data={}): + templates = Environment( + loader=FileSystemLoader(f"{overrides_path}"), keep_trailing_newline=True + ) + environments = ",".join([env["name"] for env in template_data["environments"]]) + data = {"environments": environments} + + def generate_files_for_dir(pattern): + + for file in overrides_path.glob(pattern): + if file.is_file(): + file_name = str(file).removeprefix(f"{overrides_path}/") + contents = templates.get_template(str(file_name)).render(data) + message = mkfile(base_path, output_dir / file_name, contents, overwrite=True) + click.echo(message) + + generate_files_for_dir("*") + generate_files_for_dir("bin/*") + + +def apply_environment_defaults(config): + if "environments" not in config: + return config + + enriched_config = deepcopy(config) + + environments = enriched_config["environments"] + env_defaults = environments.get("*", {}) + without_defaults_entry = { + name: data if data else {} for name, data in environments.items() if name != "*" + } + + default_versions = config.get("default_versions", {}) + + def combine_env_data(data): + return { + **env_defaults, + **data, + "versions": { + **default_versions, + **env_defaults.get("versions", {}), + **data.get("versions", {}), + }, + } + + defaulted_envs = { + env_name: combine_env_data(env_data) + for env_name, env_data in without_defaults_entry.items() + } + + enriched_config["environments"] = defaulted_envs + + return enriched_config + + +def 
read_supported_versions_from_cache(resource_name):

+    platform_helper_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
+
+    return platform_helper_config.get(resource_name).get("versions")
+
+
+def write_to_cache(resource_name, supported_versions):
+
+    platform_helper_config = {}
+
+    if os.path.exists(PLATFORM_HELPER_CACHE_FILE):
+        platform_helper_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
+
+    cache_dict = {
+        resource_name: {
+            "versions": supported_versions,
+            "date-retrieved": datetime.now().strftime("%d-%m-%y %H:%M:%S"),
+        }
+    }
+
+    platform_helper_config.update(cache_dict)
+
+    with open(PLATFORM_HELPER_CACHE_FILE, "w") as file:
+        file.write("# [!] This file is autogenerated via the platform-helper. Do not edit.\n")
+        yaml.dump(platform_helper_config, file)
+
+
+def cache_refresh_required(resource_name) -> bool:
+    """
+    Checks if the platform-helper should reach out to AWS to 'refresh' its
+    cached values.
+
+    An API call is needed if any of the following conditions are met:
+    1. No cache file (.platform-helper-config.yml) exists.
+    2. The resource name (e.g. redis, opensearch) does not exist within the cache file.
+    3. The date-retrieved value of the cached data is older than a set interval, in this case 1 day.
+    """
+
+    if not os.path.exists(PLATFORM_HELPER_CACHE_FILE):
+        return True
+
+    platform_helper_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
+
+    if platform_helper_config.get(resource_name):
+        return check_if_cached_datetime_is_greater_than_interval(
+            platform_helper_config[resource_name].get("date-retrieved"), 1
+        )
+
+    return True
+
+
+def check_if_cached_datetime_is_greater_than_interval(date_retrieved, interval_in_days):
+
+    current_datetime = datetime.now()
+    cached_datetime = datetime.strptime(date_retrieved, "%d-%m-%y %H:%M:%S")
+    delta = current_datetime - cached_datetime
+
+    return delta.days > interval_in_days
+
+
+def read_file_as_yaml(file_name):
+
+    return yaml.safe_load(Path(file_name).read_text())
diff --git a/tests/dbt_platform_helper/utils/git.py b/tests/dbt_platform_helper/utils/git.py
new file mode 100644
index 000000000..e451ce652
--- /dev/null
+++ b/tests/dbt_platform_helper/utils/git.py
@@ -0,0 +1,29 @@
+import re
+import subprocess
+
+
+class CommitNotFoundError(Exception):
+    pass
+
+
+def git_remote():
+    git_repo = subprocess.run(
+        ["git", "remote", "get-url", "origin"], capture_output=True, text=True
+    ).stdout.strip()
+    return extract_repository_name(git_repo)
+
+
+def extract_repository_name(repository_url):
+    if not repository_url:
+        return
+
+    return re.search(r"([^/:]*/[^/]*)\.git", repository_url).group(1)
+
+
+def check_if_commit_exists(commit):
+    branches_containing_commit = subprocess.run(
+        ["git", "branch", "-r", "--contains", f"{commit}"], capture_output=True, text=True
+    )
+
+    if branches_containing_commit.stderr:
+        raise CommitNotFoundError()
diff --git a/tests/dbt_platform_helper/utils/manifests.py b/tests/dbt_platform_helper/utils/manifests.py
new file mode 100644
index 000000000..ce7a15728
--- /dev/null
+++ b/tests/dbt_platform_helper/utils/manifests.py
@@ -0,0 +1,18 @@
+import yaml
+
+
+def get_service_name_from_manifest(manifest_path):
+    with open(manifest_path) as manifest:
+        document = yaml.safe_load(manifest)
+        return document["name"]
+
+
+def get_repository_name_from_manifest(manifest_path):
+    with open(manifest_path) as manifest:
+        document = yaml.safe_load(manifest)
+        image = document["image"]["location"]
+
+    repository_with_tag = image.split("/", 1)[1]
+    repository = repository_with_tag.split(":")[0]
+
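+    # Illustrative example: a manifest image location such as
+    # "public.ecr.aws/my-org/my-repo:latest" yields "my-org/my-repo".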
+ return repository diff --git a/tests/dbt_platform_helper/utils/messages.py b/tests/dbt_platform_helper/utils/messages.py new file mode 100644 index 000000000..e2b1942e3 --- /dev/null +++ b/tests/dbt_platform_helper/utils/messages.py @@ -0,0 +1,6 @@ +import click + + +def abort_with_error(message): + click.secho(f"Error: {message}", err=True, fg="red") + exit(1) diff --git a/tests/dbt_platform_helper/utils/platform_config.py b/tests/dbt_platform_helper/utils/platform_config.py new file mode 100644 index 000000000..acd543b14 --- /dev/null +++ b/tests/dbt_platform_helper/utils/platform_config.py @@ -0,0 +1,27 @@ +from pathlib import Path + +import yaml + +from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE + + +def _read_config_file_contents(): + if Path(PLATFORM_CONFIG_FILE).exists(): + return Path(PLATFORM_CONFIG_FILE).read_text() + + +def load_unvalidated_config_file(): + file_contents = _read_config_file_contents() + if not file_contents: + return {} + try: + return yaml.safe_load(file_contents) + except yaml.parser.ParserError: + return {} + + +def get_environment_pipeline_names(): + pipelines_config = load_unvalidated_config_file().get("environment_pipelines") + if pipelines_config: + return pipelines_config.keys() + return {} diff --git a/tests/dbt_platform_helper/utils/template.py b/tests/dbt_platform_helper/utils/template.py new file mode 100644 index 000000000..4ad2b9286 --- /dev/null +++ b/tests/dbt_platform_helper/utils/template.py @@ -0,0 +1,20 @@ +import re + +import jinja2 + +from dbt_platform_helper.jinja2_tags import ExtraHeaderTag +from dbt_platform_helper.jinja2_tags import VersionTag + + +def camel_case(s): + s = re.sub(r"(_|-)+", " ", s).title().replace(" ", "") + return "".join([s[0].lower(), s[1:]]) + + +def setup_templates() -> jinja2.Environment: + templateLoader = jinja2.PackageLoader("dbt_platform_helper") + templateEnv = jinja2.Environment(loader=templateLoader, keep_trailing_newline=True) + templateEnv.add_extension(ExtraHeaderTag) + templateEnv.add_extension(VersionTag) + + return templateEnv diff --git a/tests/dbt_platform_helper/utils/validation.py b/tests/dbt_platform_helper/utils/validation.py new file mode 100644 index 000000000..454142879 --- /dev/null +++ b/tests/dbt_platform_helper/utils/validation.py @@ -0,0 +1,825 @@ +import ipaddress +import os +import re +from pathlib import Path + +import click +import yaml +from schema import Optional +from schema import Or +from schema import Regex +from schema import Schema +from schema import SchemaError +from yaml.parser import ParserError +from yamllint import config +from yamllint import linter + +from dbt_platform_helper.constants import CODEBASE_PIPELINES_KEY +from dbt_platform_helper.constants import ENVIRONMENTS_KEY +from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE +from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE +from dbt_platform_helper.utils.aws import get_supported_opensearch_versions +from dbt_platform_helper.utils.aws import get_supported_redis_versions +from dbt_platform_helper.utils.files import apply_environment_defaults +from dbt_platform_helper.utils.messages import abort_with_error + + +def validate_string(regex_pattern: str): + def validator(string): + if not re.match(regex_pattern, string): + raise SchemaError( + f"String '{string}' does not match the required pattern '{regex_pattern}'. 
For more details on valid string patterns see: https://aws.github.io/copilot-cli/docs/manifest/lb-web-service/" + ) + return string + + return validator + + +S3_BUCKET_NAME_ERROR_TEMPLATE = "Bucket name '{}' is invalid:\n{}" +AVAILABILITY_UNCERTAIN_TEMPLATE = ( + "Warning: Could not determine the availability of bucket name '{}'." +) +BUCKET_NAME_IN_USE_TEMPLATE = "Warning: Bucket name '{}' is already in use. Check your AWS accounts to see if this is a problem." + + +def validate_s3_bucket_name(name: str): + errors = [] + if not (2 < len(name) < 64): + errors.append("Length must be between 3 and 63 characters inclusive.") + + if not re.match(r"^[a-z0-9].*[a-z0-9]$", name): + errors.append("Names must start and end with 0-9 or a-z.") + + if not re.match(r"^[a-z0-9.-]*$", name): + errors.append("Names can only contain the characters 0-9, a-z, '.' and '-'.") + + if ".." in name: + errors.append("Names cannot contain two adjacent periods.") + + try: + ipaddress.ip_address(name) + errors.append("Names cannot be IP addresses.") + except ValueError: + pass + + for prefix in ("xn--", "sthree-"): + if name.startswith(prefix): + errors.append(f"Names cannot be prefixed '{prefix}'.") + + for suffix in ("-s3alias", "--ol-s3"): + if name.endswith(suffix): + errors.append(f"Names cannot be suffixed '{suffix}'.") + + if errors: + raise SchemaError( + S3_BUCKET_NAME_ERROR_TEMPLATE.format(name, "\n".join(f" {e}" for e in errors)) + ) + + return True + + +def validate_addons(addons: dict): + """ + Validate the addons file and return a dictionary of addon: error message. + """ + errors = {} + + for addon_name, addon in addons.items(): + try: + addon_type = addon.get("type", None) + if not addon_type: + errors[addon_name] = f"Missing addon type in addon '{addon_name}'" + continue + schema = SCHEMA_MAP.get(addon_type, None) + if not schema: + errors[addon_name] = ( + f"Unsupported addon type '{addon_type}' in addon '{addon_name}'" + ) + continue + schema.validate(addon) + except SchemaError as ex: + errors[addon_name] = f"Error in {addon_name}: {ex.code}" + + _validate_extension_supported_versions( + config={"extensions": addons}, + extension_type="redis", + version_key="engine", + get_supported_versions_fn=get_supported_redis_versions, + ) + _validate_extension_supported_versions( + config={"extensions": addons}, + extension_type="opensearch", + version_key="engine", + get_supported_versions_fn=get_supported_opensearch_versions, + ) + + return errors + + +def int_between(lower, upper): + def is_between(value): + if isinstance(value, int) and lower <= value <= upper: + return True + raise SchemaError(f"should be an integer between {lower} and {upper}") + + return is_between + + +def float_between_with_halfstep(lower, upper): + def is_between(value): + is_number = isinstance(value, int) or isinstance(value, float) + is_half_step = re.match(r"^\d+(\.[05])?$", str(value)) + + if is_number and is_half_step and lower <= value <= upper: + return True + raise SchemaError(f"should be a number between {lower} and {upper} in increments of 0.5") + + return is_between + + +ENV_NAME = Regex( + r"^([a-z][a-zA-Z0-9]*|\*)$", + error="Environment name {} is invalid: names must only contain lowercase alphanumeric characters, or be the '*' default environment", + # For values the "error" parameter works and outputs the custom text. For keys the custom text doesn't get reported in the exception for some reason. 
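+    # e.g. "dev" and the "*" wildcard validate; "my-env" (hyphen) and
+    # "123" (leading digit) are rejected by the pattern above.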
+) + +range_validator = validate_string(r"^\d+-\d+$") +seconds_validator = validate_string(r"^\d+s$") +branch_wildcard_validator = validate_string(r"^((?!\*).)*(\*)?$") + +NUMBER = Or(int, float) +DELETION_POLICY = Or("Delete", "Retain") +DB_DELETION_POLICY = Or("Delete", "Retain", "Snapshot") +DELETION_PROTECTION = bool + +REDIS_PLANS = Or( + "micro", + "micro-ha", + "tiny", + "tiny-ha", + "small", + "small-ha", + "medium", + "medium-ha", + "large", + "large-ha", + "x-large", + "x-large-ha", +) + +REDIS_ENGINE_VERSIONS = str + +REDIS_DEFINITION = { + "type": "redis", + Optional("environments"): { + ENV_NAME: { + Optional("plan"): REDIS_PLANS, + Optional("engine"): REDIS_ENGINE_VERSIONS, + Optional("replicas"): int_between(0, 5), + Optional("deletion_policy"): DELETION_POLICY, + Optional("apply_immediately"): bool, + Optional("automatic_failover_enabled"): bool, + Optional("instance"): str, + Optional("multi_az_enabled"): bool, + } + }, +} + +POSTGRES_PLANS = Or( + "tiny", + "small", + "small-ha", + "small-high-io", + "medium", + "medium-ha", + "medium-high-io", + "large", + "large-ha", + "large-high-io", + "x-large", + "x-large-ha", + "x-large-high-io", +) +POSTGRES_STORAGE_TYPES = Or("gp2", "gp3", "io1", "io2") + +RETENTION_POLICY = Or( + None, + { + "mode": Or("GOVERNANCE", "COMPLIANCE"), + Or("days", "years", only_one=True): int, + }, +) + +DATABASE_COPY = {"from": ENV_NAME, "to": ENV_NAME} + +POSTGRES_DEFINITION = { + "type": "postgres", + "version": NUMBER, + Optional("deletion_policy"): DB_DELETION_POLICY, + Optional("environments"): { + ENV_NAME: { + Optional("plan"): POSTGRES_PLANS, + Optional("volume_size"): int_between(20, 10000), + Optional("iops"): int_between(1000, 9950), + Optional("snapshot_id"): str, + Optional("deletion_policy"): DB_DELETION_POLICY, + Optional("deletion_protection"): DELETION_PROTECTION, + Optional("multi_az"): bool, + Optional("storage_type"): POSTGRES_STORAGE_TYPES, + Optional("backup_retention_days"): int_between(1, 35), + } + }, + Optional("database_copy"): [DATABASE_COPY], + Optional("objects"): [ + { + "key": str, + Optional("body"): str, + } + ], +} + +LIFECYCLE_RULE = { + Optional("filter_prefix"): str, + "expiration_days": int, + "enabled": bool, +} + + +def kms_key_arn_regex(key): + return Regex( + r"^arn:aws:kms:.*:\d{12}:(key|alias).*", + error=f"{key} must contain a valid ARN for a KMS key", + ) + + +def s3_bucket_arn_regex(key): + return Regex( + r"^arn:aws:s3::.*", + error=f"{key} must contain a valid ARN for an S3 bucket", + ) + + +def iam_role_arn_regex(key): + return Regex( + r"^arn:aws:iam::\d{12}:role/.*", + error=f"{key} must contain a valid ARN for an IAM role", + ) + + +DATA_IMPORT = { + Optional("source_kms_key_arn"): kms_key_arn_regex("source_kms_key_arn"), + "source_bucket_arn": s3_bucket_arn_regex("source_bucket_arn"), + "worker_role_arn": iam_role_arn_regex("worker_role_arn"), +} + +DATA_MIGRATION = { + "import": DATA_IMPORT, +} + +S3_BASE = { + Optional("readonly"): bool, + Optional("serve_static_content"): bool, + Optional("services"): Or("__all__", [str]), + Optional("environments"): { + ENV_NAME: { + "bucket_name": validate_s3_bucket_name, + Optional("deletion_policy"): DELETION_POLICY, + Optional("retention_policy"): RETENTION_POLICY, + Optional("versioning"): bool, + Optional("lifecycle_rules"): [LIFECYCLE_RULE], + Optional("data_migration"): DATA_MIGRATION, + } + }, +} + +S3_POLICY_DEFINITION = dict(S3_BASE) +S3_POLICY_DEFINITION.update({"type": "s3-policy"}) + +S3_DEFINITION = dict(S3_BASE) +S3_DEFINITION.update( + { + 
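+        # In addition to the shared S3_BASE keys, plain "s3" extensions may seed
+        # objects into the bucket at deploy time, e.g. (illustrative):
+        #   {"key": "healthcheck.txt", "body": "ok", "content_type": "text/plain"}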
"type": "s3", + Optional("objects"): [{"key": str, Optional("body"): str, Optional("content_type"): str}], + } +) + +MONITORING_DEFINITION = { + "type": "monitoring", + Optional("environments"): { + ENV_NAME: { + Optional("enable_ops_center"): bool, + } + }, +} + +OPENSEARCH_PLANS = Or( + "tiny", "small", "small-ha", "medium", "medium-ha", "large", "large-ha", "x-large", "x-large-ha" +) +OPENSEARCH_ENGINE_VERSIONS = str +OPENSEARCH_MIN_VOLUME_SIZE = 10 +OPENSEARCH_MAX_VOLUME_SIZE = { + "tiny": 100, + "small": 200, + "small-ha": 200, + "medium": 512, + "medium-ha": 512, + "large": 1000, + "large-ha": 1000, + "x-large": 1500, + "x-large-ha": 1500, +} + +OPENSEARCH_DEFINITION = { + "type": "opensearch", + Optional("environments"): { + ENV_NAME: { + Optional("engine"): OPENSEARCH_ENGINE_VERSIONS, + Optional("deletion_policy"): DELETION_POLICY, + Optional("plan"): OPENSEARCH_PLANS, + Optional("volume_size"): int, + Optional("ebs_throughput"): int, + Optional("ebs_volume_type"): str, + Optional("instance"): str, + Optional("instances"): int, + Optional("master"): bool, + Optional("es_app_log_retention_in_days"): int, + Optional("index_slow_log_retention_in_days"): int, + Optional("audit_log_retention_in_days"): int, + Optional("search_slow_log_retention_in_days"): int, + Optional("password_special_characters"): str, + Optional("urlencode_password"): bool, + } + }, +} + +ALB_DEFINITION = { + "type": "alb", + Optional("environments"): { + ENV_NAME: Or( + { + Optional("additional_address_list"): list, + Optional("allowed_methods"): list, + Optional("cached_methods"): list, + Optional("cdn_compress"): bool, + Optional("cdn_domains_list"): dict, + Optional("cdn_geo_locations"): list, + Optional("cdn_geo_restriction_type"): str, + Optional("cdn_logging_bucket"): str, + Optional("cdn_logging_bucket_prefix"): str, + Optional("cdn_timeout_seconds"): int, + Optional("default_waf"): str, + Optional("domain_prefix"): str, + Optional("enable_logging"): bool, + Optional("env_root"): str, + Optional("forwarded_values_forward"): str, + Optional("forwarded_values_headers"): list, + Optional("forwarded_values_query_string"): bool, + Optional("origin_protocol_policy"): str, + Optional("origin_ssl_protocols"): list, + Optional("viewer_certificate_minimum_protocol_version"): str, + Optional("viewer_certificate_ssl_support_method"): str, + Optional("viewer_protocol_policy"): str, + }, + None, + ) + }, +} + +PROMETHEUS_POLICY_DEFINITION = { + "type": "prometheus-policy", + Optional("services"): Or("__all__", [str]), + Optional("environments"): { + ENV_NAME: { + "role_arn": str, + } + }, +} + +_DEFAULT_VERSIONS_DEFINITION = { + Optional("terraform-platform-modules"): str, + Optional("platform-helper"): str, +} +_ENVIRONMENTS_VERSIONS_OVERRIDES = { + Optional("terraform-platform-modules"): str, +} +_PIPELINE_VERSIONS_OVERRIDES = { + Optional("platform-helper"): str, +} + +_ENVIRONMENTS_PARAMS = { + Optional("accounts"): { + "deploy": { + "name": str, + "id": str, + }, + "dns": { + "name": str, + "id": str, + }, + }, + Optional("requires_approval"): bool, + Optional("versions"): _ENVIRONMENTS_VERSIONS_OVERRIDES, + Optional("vpc"): str, +} + +ENVIRONMENTS_DEFINITION = {str: Or(None, _ENVIRONMENTS_PARAMS)} + +CODEBASE_PIPELINES_DEFINITION = [ + { + "name": str, + "repository": str, + Optional("additional_ecr_repository"): str, + Optional("deploy_repository_branch"): str, + "services": list[str], + "pipelines": [ + Or( + { + "name": str, + "branch": branch_wildcard_validator, + "environments": [ + { + "name": str, + 
Optional("requires_approval"): bool, + } + ], + }, + { + "name": str, + "tag": bool, + "environments": [ + { + "name": str, + Optional("requires_approval"): bool, + } + ], + }, + ), + ], + }, +] + +ENVIRONMENT_PIPELINES_DEFINITION = { + str: { + Optional("account"): str, + Optional("branch", default="main"): str, + Optional("pipeline_to_trigger"): str, + Optional("versions"): _PIPELINE_VERSIONS_OVERRIDES, + "slack_channel": str, + "trigger_on_push": bool, + "environments": {str: Or(None, _ENVIRONMENTS_PARAMS)}, + } +} + +PLATFORM_CONFIG_SCHEMA = Schema( + { + # The following line is for the AWS Copilot version, will be removed under DBTP-1002 + "application": str, + Optional("legacy_project", default=False): bool, + Optional("default_versions"): _DEFAULT_VERSIONS_DEFINITION, + Optional("accounts"): list[str], + Optional("environments"): ENVIRONMENTS_DEFINITION, + Optional("codebase_pipelines"): CODEBASE_PIPELINES_DEFINITION, + Optional("extensions"): { + str: Or( + REDIS_DEFINITION, + POSTGRES_DEFINITION, + S3_DEFINITION, + S3_POLICY_DEFINITION, + MONITORING_DEFINITION, + OPENSEARCH_DEFINITION, + ALB_DEFINITION, + PROMETHEUS_POLICY_DEFINITION, + ) + }, + Optional("environment_pipelines"): ENVIRONMENT_PIPELINES_DEFINITION, + } +) + + +def validate_platform_config(config): + PLATFORM_CONFIG_SCHEMA.validate(config) + enriched_config = apply_environment_defaults(config) + _validate_environment_pipelines(enriched_config) + _validate_environment_pipelines_triggers(enriched_config) + _validate_codebase_pipelines(enriched_config) + validate_database_copy_section(enriched_config) + + _validate_extension_supported_versions( + config=config, + extension_type="redis", + version_key="engine", + get_supported_versions_fn=get_supported_redis_versions, + ) + _validate_extension_supported_versions( + config=config, + extension_type="opensearch", + version_key="engine", + get_supported_versions_fn=get_supported_opensearch_versions, + ) + + +def _validate_extension_supported_versions( + config, extension_type, version_key, get_supported_versions_fn +): + + extensions = config.get("extensions", {}) + if not extensions: + return + + extensions_for_type = [ + extension + for extension in config.get("extensions", {}).values() + if extension.get("type") == extension_type + ] + + supported_extension_versions = get_supported_versions_fn() + extensions_with_invalid_version = [] + + for extension in extensions_for_type: + + environments = extension.get("environments", {}) + + if not isinstance(environments, dict): + click.secho( + "Error: Opensearch extension definition is invalid type, expected dictionary", + fg="red", + ) + continue + for environment, env_config in environments.items(): + extension_version = env_config.get(version_key) + if extension_version not in supported_extension_versions: + extensions_with_invalid_version.append( + {"environment": environment, "version": extension_version} + ) + + for version_failure in extensions_with_invalid_version: + click.secho( + f"{extension_type} version for environment {version_failure['environment']} is not in the list of supported {extension_type} versions: {supported_extension_versions}. 
Provided Version: {version_failure['version']}", + fg="red", + ) + + +def validate_database_copy_section(config): + extensions = config.get("extensions", {}) + if not extensions: + return + + postgres_extensions = { + key: ext for key, ext in extensions.items() if ext.get("type", None) == "postgres" + } + + if not postgres_extensions: + return + + errors = [] + + for extension_name, extension in postgres_extensions.items(): + database_copy_sections = extension.get("database_copy", []) + + if not database_copy_sections: + return + + all_environments = [env for env in config.get("environments", {}).keys() if not env == "*"] + all_envs_string = ", ".join(all_environments) + + for section in database_copy_sections: + from_env = section["from"] + to_env = section["to"] + + if from_env == to_env: + errors.append( + f"database_copy 'to' and 'from' cannot be the same environment in extension '{extension_name}'." + ) + + if "prod" in to_env: + errors.append( + f"Copying to a prod environment is not supported: database_copy 'to' cannot be '{to_env}' in extension '{extension_name}'." + ) + + if from_env not in all_environments: + errors.append( + f"database_copy 'from' parameter must be a valid environment ({all_envs_string}) but was '{from_env}' in extension '{extension_name}'." + ) + + if to_env not in all_environments: + errors.append( + f"database_copy 'to' parameter must be a valid environment ({all_envs_string}) but was '{to_env}' in extension '{extension_name}'." + ) + + if errors: + abort_with_error("\n".join(errors)) + + +def _validate_environment_pipelines(config): + bad_pipelines = {} + for pipeline_name, pipeline in config.get("environment_pipelines", {}).items(): + bad_envs = [] + pipeline_account = pipeline.get("account", None) + if pipeline_account: + for env in pipeline.get("environments", {}).keys(): + env_account = ( + config.get("environments", {}) + .get(env, {}) + .get("accounts", {}) + .get("deploy", {}) + .get("name") + ) + if not env_account == pipeline_account: + bad_envs.append(env) + if bad_envs: + bad_pipelines[pipeline_name] = {"account": pipeline_account, "bad_envs": bad_envs} + if bad_pipelines: + message = "The following pipelines are misconfigured:" + for pipeline, detail in bad_pipelines.items(): + envs = detail["bad_envs"] + acc = detail["account"] + message += f" '{pipeline}' - these environments are not in the '{acc}' account: {', '.join(envs)}\n" + abort_with_error(message) + + +def _validate_codebase_pipelines(config): + if CODEBASE_PIPELINES_KEY in config: + for codebase in config[CODEBASE_PIPELINES_KEY]: + codebase_environments = [] + + for pipeline in codebase["pipelines"]: + codebase_environments += [e["name"] for e in pipeline[ENVIRONMENTS_KEY]] + + unique_codebase_environments = sorted(list(set(codebase_environments))) + + if sorted(codebase_environments) != sorted(unique_codebase_environments): + abort_with_error( + f"The {PLATFORM_CONFIG_FILE} file is invalid, each environment can only be " + "listed in a single pipeline per codebase" + ) + + +def _validate_environment_pipelines_triggers(config): + errors = [] + pipelines_with_triggers = { + pipeline_name: pipeline + for pipeline_name, pipeline in config.get("environment_pipelines", {}).items() + if "pipeline_to_trigger" in pipeline + } + + for pipeline_name, pipeline in pipelines_with_triggers.items(): + pipeline_to_trigger = pipeline["pipeline_to_trigger"] + if pipeline_to_trigger not in config.get("environment_pipelines", {}): + message = f" '{pipeline_name}' - '{pipeline_to_trigger}' is not a valid 
target pipeline to trigger" + + errors.append(message) + continue + + if pipeline_to_trigger == pipeline_name: + message = f" '{pipeline_name}' - pipelines cannot trigger themselves" + errors.append(message) + + if errors: + error_message = "The following pipelines are misconfigured: \n" + abort_with_error(error_message + "\n ".join(errors)) + + +def lint_yaml_for_duplicate_keys(file_path): + lint_yaml_config = """ +rules: + key-duplicates: enable +""" + yaml_config = config.YamlLintConfig(lint_yaml_config) + + with open(file_path, "r") as yaml_file: + file_contents = yaml_file.read() + results = linter.run(file_contents, yaml_config) + + parsed_results = [ + "\t" + f"Line {result.line}: {result.message}".replace(" in mapping (key-duplicates)", "") + for result in results + ] + + return parsed_results + + +def load_and_validate_platform_config(path=PLATFORM_CONFIG_FILE, disable_file_check=False): + if not disable_file_check: + config_file_check(path) + try: + conf = yaml.safe_load(Path(path).read_text()) + duplicate_keys = lint_yaml_for_duplicate_keys(path) + if duplicate_keys: + abort_with_error( + "Duplicate keys found in platform-config:" + + os.linesep + + os.linesep.join(duplicate_keys) + ) + validate_platform_config(conf) + return conf + except ParserError: + abort_with_error(f"{PLATFORM_CONFIG_FILE} is not valid YAML") + except SchemaError as e: + abort_with_error(f"Schema error in {PLATFORM_CONFIG_FILE}. {e}") + + +def config_file_check(path=PLATFORM_CONFIG_FILE): + platform_config_exists = Path(path).exists() + errors = [] + warnings = [] + + messages = { + "storage.yml": {"instruction": " under the key 'extensions'", "type": errors}, + "extensions.yml": {"instruction": " under the key 'extensions'", "type": errors}, + "pipelines.yml": { + "instruction": ", change the key 'codebases' to 'codebase_pipelines'", + "type": errors, + }, + PLATFORM_HELPER_VERSION_FILE: { + "instruction": ", under the key `default_versions: platform-helper:`", + "type": warnings, + }, + } + + for file in messages.keys(): + if Path(file).exists(): + message = ( + f"`{file}` is no longer supported. Please move its contents into the " + f"`{PLATFORM_CONFIG_FILE}` file{messages[file]['instruction']} and delete `{file}`." + ) + messages[file]["type"].append(message) + + if not errors and not warnings and not platform_config_exists: + errors.append( + f"`{PLATFORM_CONFIG_FILE}` is missing. " + "Please check it exists and you are in the root directory of your deployment project." 
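+            # Reached only when neither a platform config nor any of the legacy
+            # files above were found, i.e. the project has no configuration at all.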
+ ) + + if warnings: + click.secho("\n".join(warnings), bg="yellow", fg="black") + if errors: + click.secho("\n".join(errors), bg="red", fg="white") + exit(1) + + +S3_SCHEMA = Schema(S3_DEFINITION) +S3_POLICY_SCHEMA = Schema(S3_POLICY_DEFINITION) +POSTGRES_SCHEMA = Schema(POSTGRES_DEFINITION) +REDIS_SCHEMA = Schema(REDIS_DEFINITION) + + +class ConditionalSchema(Schema): + def validate(self, data, _is_conditional_schema=True): + data = super(ConditionalSchema, self).validate(data, _is_conditional_schema=False) + if _is_conditional_schema: + default_plan = None + default_volume_size = None + + default_environment_config = data["environments"].get( + "*", data["environments"].get("default", None) + ) + if default_environment_config: + default_plan = default_environment_config.get("plan", None) + default_volume_size = default_environment_config.get("volume_size", None) + + for env in data["environments"]: + volume_size = data["environments"][env].get("volume_size", default_volume_size) + plan = data["environments"][env].get("plan", default_plan) + + if volume_size: + if not plan: + raise SchemaError(f"Missing key: 'plan'") + + if volume_size < OPENSEARCH_MIN_VOLUME_SIZE: + raise SchemaError( + f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {OPENSEARCH_MIN_VOLUME_SIZE}" + ) + + for key in OPENSEARCH_MAX_VOLUME_SIZE: + if plan == key and not volume_size <= OPENSEARCH_MAX_VOLUME_SIZE[key]: + raise SchemaError( + f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {OPENSEARCH_MIN_VOLUME_SIZE} and {OPENSEARCH_MAX_VOLUME_SIZE[key]} for plan {plan}" + ) + + return data + + +OPENSEARCH_SCHEMA = ConditionalSchema(OPENSEARCH_DEFINITION) +MONITORING_SCHEMA = Schema(MONITORING_DEFINITION) +ALB_SCHEMA = Schema(ALB_DEFINITION) +PROMETHEUS_POLICY_SCHEMA = Schema(PROMETHEUS_POLICY_DEFINITION) + + +def no_param_schema(schema_type): + return Schema({"type": schema_type, Optional("services"): Or("__all__", [str])}) + + +SCHEMA_MAP = { + "s3": S3_SCHEMA, + "s3-policy": S3_POLICY_SCHEMA, + "postgres": POSTGRES_SCHEMA, + "redis": REDIS_SCHEMA, + "opensearch": OPENSEARCH_SCHEMA, + "monitoring": MONITORING_SCHEMA, + "appconfig-ipfilter": no_param_schema("appconfig-ipfilter"), + "subscription-filter": no_param_schema("subscription-filter"), + "vpc": no_param_schema("vpc"), + "xray": no_param_schema("xray"), + "alb": ALB_SCHEMA, + "prometheus-policy": PROMETHEUS_POLICY_SCHEMA, +} diff --git a/tests/dbt_platform_helper/utils/versioning.py b/tests/dbt_platform_helper/utils/versioning.py new file mode 100644 index 000000000..5c87ee42e --- /dev/null +++ b/tests/dbt_platform_helper/utils/versioning.py @@ -0,0 +1,299 @@ +import os +import re +import subprocess +from importlib.metadata import PackageNotFoundError +from importlib.metadata import version +from pathlib import Path +from typing import Optional +from typing import Tuple +from typing import Union + +import click +import requests + +from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE +from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE +from dbt_platform_helper.exceptions import IncompatibleMajorVersion +from dbt_platform_helper.exceptions import IncompatibleMinorVersion +from dbt_platform_helper.exceptions import ValidationException +from dbt_platform_helper.utils.platform_config import load_unvalidated_config_file + +VersionTuple = Optional[Tuple[int, int, int]] + + +class Versions: + def __init__(self, local_version: 
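+        # VersionTuple is an Optional (major, minor, patch) triple; None means the
+        # version could not be determined (see parse_version below).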
VersionTuple = None, latest_release: VersionTuple = None): + self.local_version = local_version + self.latest_release = latest_release + + +class PlatformHelperVersions: + def __init__( + self, + local_version: VersionTuple = None, + latest_release: VersionTuple = None, + platform_helper_file_version: VersionTuple = None, + platform_config_default: VersionTuple = None, + pipeline_overrides: dict[str, str] = None, + ): + self.local_version = local_version + self.latest_release = latest_release + self.platform_helper_file_version = platform_helper_file_version + self.platform_config_default = platform_config_default + self.pipeline_overrides = pipeline_overrides if pipeline_overrides else {} + + +def string_version(input_version: VersionTuple) -> str: + if input_version is None: + return "unknown" + major, minor, patch = input_version + return ".".join([str(s) for s in [major, minor, patch]]) + + +def parse_version(input_version: Union[str, None]) -> VersionTuple: + if input_version is None: + return None + + version_plain = input_version.replace("v", "") + version_segments = re.split(r"[.\-]", version_plain) + + if len(version_segments) != 3: + return None + + output_version = [0, 0, 0] + for index, segment in enumerate(version_segments): + try: + output_version[index] = int(segment) + except ValueError: + output_version[index] = -1 + return output_version[0], output_version[1], output_version[2] + + +def get_copilot_versions() -> Versions: + copilot_version = None + + try: + response = subprocess.run("copilot --version", capture_output=True, shell=True) + [copilot_version] = re.findall(r"[0-9.]+", response.stdout.decode("utf8")) + except ValueError: + pass + + return Versions(parse_version(copilot_version), get_github_released_version("aws/copilot-cli")) + + +def get_aws_versions() -> Versions: + aws_version = None + try: + response = subprocess.run("aws --version", capture_output=True, shell=True) + matched = re.match(r"aws-cli/([0-9.]+)", response.stdout.decode("utf8")) + aws_version = parse_version(matched.group(1)) + except ValueError: + pass + + return Versions(aws_version, get_github_released_version("aws/aws-cli", True)) + + +def get_github_released_version(repository: str, tags: bool = False) -> Tuple[int, int, int]: + if tags: + tags_list = requests.get(f"https://api.github.com/repos/{repository}/tags").json() + versions = [parse_version(v["name"]) for v in tags_list] + versions.sort(reverse=True) + return versions[0] + + package_info = requests.get(f"https://api.github.com/repos/{repository}/releases/latest").json() + return parse_version(package_info["tag_name"]) + + +def _get_latest_release(): + package_info = requests.get("https://pypi.org/pypi/dbt-platform-helper/json").json() + released_versions = package_info["releases"].keys() + parsed_released_versions = [parse_version(v) for v in released_versions] + parsed_released_versions.sort(reverse=True) + return parsed_released_versions[0] + + +def get_platform_helper_versions(include_project_versions=True) -> PlatformHelperVersions: + try: + locally_installed_version = parse_version(version("dbt-platform-helper")) + except PackageNotFoundError: + locally_installed_version = None + + latest_release = _get_latest_release() + + if not include_project_versions: + return PlatformHelperVersions( + local_version=locally_installed_version, + latest_release=latest_release, + ) + + deprecated_version_file = Path(PLATFORM_HELPER_VERSION_FILE) + version_from_file = ( + parse_version(deprecated_version_file.read_text()) + if 
deprecated_version_file.exists() + else None + ) + + platform_config_default, pipeline_overrides = None, {} + + platform_config = load_unvalidated_config_file() + + if platform_config: + platform_config_default = parse_version( + platform_config.get("default_versions", {}).get("platform-helper") + ) + + pipeline_overrides = { + name: pipeline.get("versions", {}).get("platform-helper") + for name, pipeline in platform_config.get("environment_pipelines", {}).items() + if pipeline.get("versions", {}).get("platform-helper") + } + + out = PlatformHelperVersions( + local_version=locally_installed_version, + latest_release=latest_release, + platform_helper_file_version=version_from_file, + platform_config_default=platform_config_default, + pipeline_overrides=pipeline_overrides, + ) + + _process_version_file_warnings(out) + + return out + + +def _process_version_file_warnings(versions: PlatformHelperVersions): + messages = [] + missing_default_version_message = f"Create a section in the root of '{PLATFORM_CONFIG_FILE}':\n\ndefault_versions:\n platform-helper: " + deprecation_message = f"Please delete '{PLATFORM_HELPER_VERSION_FILE}' as it is now deprecated." + + if versions.platform_config_default and versions.platform_helper_file_version: + messages.append(deprecation_message) + + if versions.platform_config_default and not versions.platform_helper_file_version: + return + + if not versions.platform_config_default and versions.platform_helper_file_version: + messages.append(deprecation_message) + messages.append( + f"{missing_default_version_message}{string_version(versions.platform_helper_file_version)}\n" + ) + + if not versions.platform_config_default and not versions.platform_helper_file_version: + message = f"Cannot get dbt-platform-helper version from '{PLATFORM_CONFIG_FILE}'.\n" + message += f"{missing_default_version_message}{string_version(versions.local_version)}\n" + click.secho(message, fg="red") + + if messages: + click.secho("\n".join(messages), fg="yellow") + + +def validate_version_compatibility( + app_version: Tuple[int, int, int], check_version: Tuple[int, int, int] +): + app_major, app_minor, app_patch = app_version + check_major, check_minor, check_patch = check_version + app_version_as_string = string_version(app_version) + check_version_as_string = string_version(check_version) + + if (app_major == 0 and check_major == 0) and ( + app_minor != check_minor or app_patch != check_patch + ): + raise IncompatibleMajorVersion(app_version_as_string, check_version_as_string) + + if app_major != check_major: + raise IncompatibleMajorVersion(app_version_as_string, check_version_as_string) + + if app_minor != check_minor: + raise IncompatibleMinorVersion(app_version_as_string, check_version_as_string) + + +def check_version_on_file_compatibility( + app_version: Tuple[int, int, int], file_version: Tuple[int, int, int] +): + app_major, app_minor, app_patch = app_version + file_major, file_minor, file_patch = file_version + + return app_major == file_major and app_minor == file_minor and app_patch == file_patch + + +def get_template_generated_with_version(template_file_path: str) -> Tuple[int, int, int]: + try: + template_contents = Path(template_file_path).read_text() + template_version = re.match( + r"# Generated by platform-helper ([v.\-0-9]+)", template_contents + ).group(1) + return parse_version(template_version) + except (IndexError, AttributeError): + raise ValidationException(f"Template {template_file_path} has no version information") + + +def 
validate_template_version(app_version: Tuple[int, int, int], template_file_path: str):
+    validate_version_compatibility(
+        app_version,
+        get_template_generated_with_version(template_file_path),
+    )
+
+
+def check_platform_helper_version_needs_update():
+    if not running_as_installed_package() or "PLATFORM_TOOLS_SKIP_VERSION_CHECK" in os.environ:
+        return
+
+    versions = get_platform_helper_versions(include_project_versions=False)
+    local_version = versions.local_version
+    latest_release = versions.latest_release
+    message = (
+        f"You are running platform-helper v{string_version(local_version)}, upgrade to "
+        f"v{string_version(latest_release)} by running `pip install "
+        "--upgrade dbt-platform-helper`."
+    )
+    try:
+        validate_version_compatibility(local_version, latest_release)
+    except IncompatibleMajorVersion:
+        click.secho(message, fg="red")
+    except IncompatibleMinorVersion:
+        click.secho(message, fg="yellow")
+
+
+def check_platform_helper_version_mismatch():
+    if not running_as_installed_package():
+        return
+
+    versions = get_platform_helper_versions()
+    local_version = versions.local_version
+    platform_helper_file_version = parse_version(
+        get_required_platform_helper_version(versions=versions)
+    )
+
+    if not check_version_on_file_compatibility(local_version, platform_helper_file_version):
+        message = (
+            f"WARNING: You are running platform-helper v{string_version(local_version)} against "
+            f"v{string_version(platform_helper_file_version)} specified by {PLATFORM_HELPER_VERSION_FILE}."
+        )
+        click.secho(message, fg="red")
+
+
+def running_as_installed_package():
+    return "site-packages" in __file__
+
+
+def get_required_platform_helper_version(
+    pipeline: str = None, versions: PlatformHelperVersions = None
+) -> str:
+    if not versions:
+        versions = get_platform_helper_versions()
+    pipeline_version = versions.pipeline_overrides.get(pipeline)
+    version_precedence = [
+        pipeline_version,
+        versions.platform_config_default,
+        versions.platform_helper_file_version,
+    ]
+    non_null_version_precedence = [
+        string_version(v) if isinstance(v, tuple) else v for v in version_precedence if v
+    ]
+
+    out = non_null_version_precedence[0] if non_null_version_precedence else None
+
+    if not out:
+        raise SystemExit(1)
+
+    return out
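+
+
+# Version resolution sketch (illustrative): get_required_platform_helper_version
+# prefers a pipeline-specific override, then the platform config default, then
+# the deprecated version file, and exits non-zero when none of them is set.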