diff --git a/.config b/.config new file mode 100644 index 0000000..2a4491b --- /dev/null +++ b/.config @@ -0,0 +1,2 @@ +BOOTSTRAP_ROOT=$(dirname $0) +VARIABLES_PATH=$(realpath $BOOTSTRAP_ROOT/infrastructure/variables.tfvars) diff --git a/.github/workflows/base.yml b/.github/workflows/base.yml index d2c1984..8afab1a 100644 --- a/.github/workflows/base.yml +++ b/.github/workflows/base.yml @@ -2,14 +2,11 @@ name: Lambda Boilerplate on: [push] jobs: - lint-and-validate: + terraform-lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: '3.8.2' - - name: Install dependencies - run: CI=1 . script/bootstrap - - name: Lint - run: inv lint + - uses: hashicorp/setup-terraform@v2 + - run: | + . script/bootstrap + . script/terraform-lint diff --git a/.gitignore b/.gitignore index 1cf9d6d..c20bd75 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +bin *.venv *.sw* *.pyc @@ -6,4 +7,5 @@ *.terraform* *tfstate* *tfvars +.tflint .coverage diff --git a/README.md b/README.md index 2cf387c..d297e21 100644 --- a/README.md +++ b/README.md @@ -9,23 +9,20 @@ AWS Lambdas are fun, but often the amount of boilerplate involved in getting a p ## Local development -To get started, hit the bootstrap script with `. script/bootstrap`. This will set up a Python 3.8 virtualenv set up with some basic tools that will make your life easier. - The base Lambda handler is at `src/base.py` and all the Terraform configurations are in `infrastructure`. [Read more about Sceptre](https://sceptre.cloudreach.com/latest/index.html://www.terraform.io/docs/index.html) [Read more about AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html) -### Invocations +### Tooling -This template uses PyInvoke, all commands are of the format `inv `. - -Use `inv --list` for the full list of commands. 
+All the tooling is implemented using the [One Script to Rule Them +All](https://github.com/github/scripts-to-rule-them-all) paradigm and can be found under `script`. ## Deployment -Deployment is in three steps: on first setup, you will need to make sure that your `bootstrap` environment is ready via `inv cloud.apply bootstrap`. Then, you should upload your lambdas' source with `inv cloud.pack` and `inv cloud.push`. Finally, you can deploy your application resources with `inv cloud.deploy app`. +Deployment is in three steps: on first setup, you will need to make sure that your `bootstrap` environment is ready via `PROJECT=bootstrap . script/apply`. Then, you should prepare your source code package (dependent on the language and framework you're using, you have to supply this bit!) and `ARCHIVE= . script/push`. Finally, you can deploy your application resources with `PROJECT=app . script/apply`. ## Contributing diff --git a/infrastructure/app/app.tf b/infrastructure/app/app.tf index 6733917..e160479 100644 --- a/infrastructure/app/app.tf +++ b/infrastructure/app/app.tf @@ -1,11 +1,19 @@ -provider aws { - profile = "default" - region = var.aws_region +terraform { + required_version = ">=1.0" + + required_providers { + aws = "4.29.0" + } +} + +provider "aws" { + profile = "default" + region = var.aws_region } resource "aws_iam_role" "lambda_role" { - name = "lambda_role" - assume_role_policy = < FUNCTION_NAME= . script/invoke + +( + source $(dirname $0)/../.config + + cd $PROJECT_ROOT + + aws lambda invoke --endpoint http://localhost:9001 --no-sign-request --function-name $FUNCTION_NAME --log-type Tail + --payload $PAYLOAD $FUNCTION_NAME_out.json" + ) + +) diff --git a/script/plan b/script/plan new file mode 100644 index 0000000..12f2a9f --- /dev/null +++ b/script/plan @@ -0,0 +1,17 @@ +#!/usr/bin/bash + +# Generates a plan for the given project. +# +# The project name is expected to be passed as an environment variable, +# i.e. PROJECT=app . 
script/plan + +( + source $(dirname $0)/../.config + + PROJECT_ROOT=$(realpath $BOOTSTRAP_ROOT/infrastructure/$PROJECT) + + cd $PROJECT_ROOT + + terraform init + terraform plan --var-file $VARIABLES_PATH +) diff --git a/script/push b/script/push new file mode 100644 index 0000000..6d70300 --- /dev/null +++ b/script/push @@ -0,0 +1,20 @@ +#!/usr/bin/bash + +# Pushes the lambda archive to S3. +# +# This is a pre-condition to script/apply since creating +# the Lambda function will expect the source code to be +# available in the artifacts bucket. +# +# The archive path is expected to be passed as an environment variable, +# i.e. ARCHIVE= . script/push + +( + source $(dirname $0)/../.config + + BOOTSTRAP_PROJECT=$(realpath $BOOTSTRAP_ROOT/infrastructure/bootstrap) + cd $BOOTSTRAP_PROJECT + BUCKET_NAME=$(terraform output --json | jq .artifacts_bucket_name.value -r) + cd - + aws s3 cp $ARCHIVE s3://$BUCKET_NAME +) diff --git a/script/start b/script/start new file mode 100644 index 0000000..ce72427 --- /dev/null +++ b/script/start @@ -0,0 +1,11 @@ +#!/usr/bin/bash + +# Starts a local instance of the lambda. + +( + source $(dirname $0)/../.config + + cd $PROJECT_ROOT + + docker-compose up -d --build +) diff --git a/script/stop b/script/stop new file mode 100644 index 0000000..e8f3522 --- /dev/null +++ b/script/stop @@ -0,0 +1,11 @@ +#!/usr/bin/bash + +# Stops a running local instance of the lambda. + +( + source $(dirname $0)/../.config + + cd $PROJECT_ROOT + + docker-compose down +) diff --git a/script/terraform-lint b/script/terraform-lint new file mode 100644 index 0000000..d17233a --- /dev/null +++ b/script/terraform-lint @@ -0,0 +1,19 @@ +#!/bin/bash + +# Lints and formats terraform files. 
+ +( + TFLINT_PATH=$(realpath ./.tflint/tflint) + + for PROJECT in "bootstrap" "app" + do + ( + PROJECT_ROOT=./infrastructure/$PROJECT + echo ">>> Linting $PROJECT_ROOT" + cd $PROJECT_ROOT + terraform init + $TFLINT_PATH # --var-file ../variables.tfvars + terraform fmt -write=true + ) + done +) diff --git a/tasks.py b/tasks.py deleted file mode 100644 index 8668706..0000000 --- a/tasks.py +++ /dev/null @@ -1,218 +0,0 @@ -from invoke import task, Collection -import boto3 -import os -from pathlib import Path -import hashlib -import re -from typing import List, Dict - -BASE_PATH = str(Path(__file__).parent.absolute()) -VARIABLES_PATH = "../variables.tfvars" - -HELP_SEGMENTS = { - "project": "Project name, either app or bootstrap", - "archive": "Archive file", - "function_name": "Name of the Lambda to invoke locally (as defined in the Cloudformation template)", - "payload": "JSON payload to include in the trigger event", - "fix": "Whether to fix errors", - "env": "Environment (dev or prod)", - "package": "Target package (incl. range)", -} - - -def _compose_path(path: str) -> str: - return str(Path(BASE_PATH, path).absolute()) - - -def _build_help_dict(segments: List[str]) -> Dict[str, str]: - return {segment: HELP_SEGMENTS[segment] for segment in segments} - - -PROJECT_PATHS = { - "app": _compose_path("infrastructure/app"), - "bootstrap": _compose_path("infrastructure/bootstrap"), -} - -##################### -# Cloud invocations # -##################### - - -@task(name="plan", help=_build_help_dict(["project"])) -def cloud_plan(ctx, project): - """ - Builds the Terraform plan for the given project. - """ - with ctx.cd(PROJECT_PATHS[project]): - ctx.run("terraform init") - ctx.run(f"terraform plan --var-file {VARIABLES_PATH}") - - -@task(name="apply", help=_build_help_dict(["project"])) -def cloud_apply(ctx, project): - """ - Applies infrastructure changes to the given project. 
- """ - with ctx.cd(PROJECT_PATHS[project]): - ctx.run("terraform init") - ctx.run("terraform taint --allow-missing aws_lambda_function.apgnd_lambda_func") - ctx.run("terraform taint --allow-missing aws_lambda_permission.apigw") - ctx.run(f"terraform apply --var-file {VARIABLES_PATH}") - - -@task(name="destroy", help=_build_help_dict(["project"])) -def cloud_destroy(ctx, project): - """ - Destroys resources associated with the given project. - """ - with ctx.cd(PROJECT_PATHS[project]): - ctx.run("terraform init") - ctx.run(f"terraform destroy --var-file {VARIABLES_PATH}") - - -@task(name="pack") -def cloud_pack(ctx): - """ - Prepares and packages the source code for lambdas. - """ - with ctx.cd(BASE_PATH): - ctx.run("pip install -r requirements.txt --target package/") - ctx.run("zip -r lambda_function.zip src/*") - - with ctx.cd(_compose_path("package")): - ctx.run("zip -r ../lambda_function.zip ./") - - -@task(name="push", help=_build_help_dict(["archive"])) -def cloud_push(ctx, archive): - """ - Pushes the given archive to S3. - """ - artifacts_bucket = None - - with ctx.cd(_compose_path(PROJECT_PATHS["bootstrap"])): - out = ctx.run("terraform output", hide="out").stdout - artifacts_bucket_match = re.match( - 'artifacts_bucket_name = "(?P[0-9a-zA-Z-_]+)"\n?', out - ) - - artifacts_bucket = artifacts_bucket_match.group("bucket_name") - - with ctx.cd(BASE_PATH): - ctx.run(f"aws s3 cp {archive} s3://{artifacts_bucket}", hide="out") - - print(f"Uploaded {archive} to s3 ({artifacts_bucket})!") - - -##################### -# Local invocations # -##################### - - -@task(name="start") -def local_start(ctx): - """ - Starts your stack locally. - """ - ctx.run("docker-compose up -d --build") - - -@task(name="stop") -def local_stop(ctx): - """ - Stops your local stack. 
- """ - ctx.run("docker-compose down") - - -@task( - name="invoke", - help=_build_help_dict(["function_name", "payload"]), -) -def local_invoke(ctx, function_name, payload): - """ - Triggers the local lambda with the given payload - """ - ctx.run( - f"aws lambda invoke --endpoint http://localhost:9001 --no-sign-request --function-name {function_name} --log-type Tail --payload {payload} {function_name}_out.json" - ) - - -##################### -# Other invocations # -#################### - - -@task(name="lock") -def lock_requirements(ctx): - """ - Builds the pip lockfile - """ - with ctx.cd(BASE_PATH): - ctx.run("python -m piptools compile requirements.in", hide="both") - ctx.run( - "python -m piptools compile requirements_dev.in --output-file requirements_dev.txt", - hide="both", - ) - - -@task(name="update", help=_build_help_dict(["env", "package"])) -def update_requirements(ctx, env, package): - """ - Updates a package an regenerates the lockfiles. - """ - deps = None - - if env == "prod": - deps = "requirements.in" - elif env == "dev": - deps = "requirements_dev.in" - else: - raise ValueError("Invalid env") - - with ctx.cd(BASE_PATH): - ctx.run(f"python -m piptools compile {deps} --upgrade-package {package}") - - -@task(name="lint", help=_build_help_dict(["fix"])) -def lint(ctx, fix=False): - """ - Lints - """ - with ctx.cd(BASE_PATH): - ctx.run("black *.py **/*.py" + (" --check" if not fix else "")) - - -@task(name="test") -def test(ctx): - """ - Runs tests - """ - with ctx.cd(BASE_PATH): - ctx.run("pytest --cov=src") - - -ns = Collection() - -local = Collection("local") -local.add_task(local_start) -local.add_task(local_stop) -local.add_task(local_invoke) - -cloud = Collection("cloud") -cloud.add_task(cloud_plan) -cloud.add_task(cloud_apply) -cloud.add_task(cloud_destroy) -cloud.add_task(cloud_pack) -cloud.add_task(cloud_push) - -project = Collection("requirements") -project.add_task(lock_requirements) -project.add_task(update_requirements) - 
-ns.add_collection(local) -ns.add_collection(cloud) -ns.add_collection(project) - -ns.add_task(lint) -ns.add_task(test)