refactor: pure bash instead of python scripts (#9)

docs: instructions around tooling

infra: fix linting and CI
This commit is contained in:
Marc 2022-09-23 01:11:41 -04:00 committed by GitHub
parent 08da99b99c
commit 494bb4d681
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
24 changed files with 229 additions and 508 deletions

2
.config Normal file
View file

@ -0,0 +1,2 @@
# Shared configuration sourced by every script under script/.
# NOTE(review): when sourced via `. script/<name>` from the repository root,
# $0 is the interactive shell, so dirname resolves to "." — these paths are
# only correct when invoked from the repo root. TODO confirm that constraint.
BOOTSTRAP_ROOT=$(dirname "$0")
VARIABLES_PATH=$(realpath "$BOOTSTRAP_ROOT/infrastructure/variables.tfvars")

View file

@ -2,14 +2,11 @@ name: Lambda Boilerplate
on: [push] on: [push]
jobs: jobs:
lint-and-validate: terraform-lint:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/setup-python@v2 - uses: hashicorp/setup-terraform@v2
with: - run: |
python-version: '3.8.2' . script/bootstrap
- name: Install dependencies . script/terraform-lint
run: CI=1 . script/bootstrap
- name: Lint
run: inv lint

2
.gitignore vendored
View file

@ -1,3 +1,4 @@
bin
*.venv *.venv
*.sw* *.sw*
*.pyc *.pyc
@ -6,4 +7,5 @@
*.terraform* *.terraform*
*tfstate* *tfstate*
*tfvars *tfvars
.tflint
.coverage .coverage

View file

@ -9,23 +9,20 @@ AWS Lambdas are fun, but often the amount of boilerplate involved in getting a p
## Local development ## Local development
To get started, hit the bootstrap script with `. script/bootstrap`. This will set up a Python 3.8 virtualenv set up with some basic tools that will make your life easier.
The base Lambda handler is at `src/base.py` and all the Terraform configurations are in `infrastructure`. The base Lambda handler is at `src/base.py` and all the Terraform configurations are in `infrastructure`.
[Read more about Terraform](https://www.terraform.io/docs/index.html) [Read more about Terraform](https://www.terraform.io/docs/index.html)
[Read more about AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html) [Read more about AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html)
### Invocations ### Tooling
This template uses PyInvoke, all commands are of the format `inv <command> <parameters>`. All the tooling is implemented using the [One Script to Rule Them
All](https://github.com/github/scripts-to-rule-them-all) paradigm and can be found under `script`.
Use `inv --list` for the full list of commands.
## Deployment ## Deployment
Deployment is in three steps: on first setup, you will need to make sure that your `bootstrap` environment is ready via `inv cloud.apply bootstrap`. Then, you should upload your lambdas' source with `inv cloud.pack` and `inv cloud.push`. Finally, you can deploy your application resources with `inv cloud.deploy app`. Deployment is in three steps: on first setup, you will need to make sure that your `bootstrap` environment is ready via `PROJECT=bootstrap . script/apply`. Then, you should prepare your source code package (dependent on the language and framework you're using, you have to supply this bit!) and `ARCHIVE=<path-to-zip> . script/push`. Finally, you can deploy your application resources with `PROJECT=app . script/apply`.
## Contributing ## Contributing

View file

@ -1,11 +1,19 @@
provider aws { terraform {
profile = "default" required_version = ">=1.0"
region = var.aws_region
required_providers {
aws = "4.29.0"
}
}
provider "aws" {
profile = "default"
region = var.aws_region
} }
resource "aws_iam_role" "lambda_role" { resource "aws_iam_role" "lambda_role" {
name = "lambda_role" name = "lambda_role"
assume_role_policy = <<EOF assume_role_policy = <<EOF
{ {
"Version": "2012-10-17", "Version": "2012-10-17",
"Statement": [ "Statement": [
@ -23,60 +31,60 @@ EOF
} }
resource "aws_lambda_function" "lambda_func" { resource "aws_lambda_function" "lambda_func" {
function_name = "boilerplate_function" function_name = "boilerplate_function"
role = aws_iam_role.lambda_role.arn role = aws_iam_role.lambda_role.arn
handler = "src.base.handler" handler = "src.base.handler"
runtime = "python3.8" runtime = "python3.8"
s3_bucket = var.artifacts_bucket_name s3_bucket = var.artifacts_bucket_name
s3_key = var.lambda_archive_name s3_key = var.lambda_archive_name
} }
resource "aws_api_gateway_rest_api" "gateway" { resource "aws_api_gateway_rest_api" "gateway" {
name = "boilerplate" name = "boilerplate"
description = "Lambda Boilerplate" description = "Lambda Boilerplate"
} }
resource "aws_api_gateway_resource" "lambda_proxy" { resource "aws_api_gateway_resource" "lambda_proxy" {
rest_api_id = aws_api_gateway_rest_api.gateway.id rest_api_id = aws_api_gateway_rest_api.gateway.id
parent_id = aws_api_gateway_rest_api.gateway.root_resource_id parent_id = aws_api_gateway_rest_api.gateway.root_resource_id
path_part = "{proxy+}" path_part = "{proxy+}"
} }
resource "aws_api_gateway_method" "lambda_proxy" { resource "aws_api_gateway_method" "lambda_proxy" {
rest_api_id = aws_api_gateway_rest_api.gateway.id rest_api_id = aws_api_gateway_rest_api.gateway.id
resource_id = aws_api_gateway_resource.lambda_proxy.id resource_id = aws_api_gateway_resource.lambda_proxy.id
http_method = "ANY" http_method = "ANY"
authorization = "NONE" authorization = "NONE"
} }
resource "aws_api_gateway_integration" "lambda" { resource "aws_api_gateway_integration" "lambda" {
rest_api_id = aws_api_gateway_rest_api.gateway.id rest_api_id = aws_api_gateway_rest_api.gateway.id
resource_id = aws_api_gateway_resource.lambda_proxy.id resource_id = aws_api_gateway_resource.lambda_proxy.id
http_method = aws_api_gateway_method.lambda_proxy.http_method http_method = aws_api_gateway_method.lambda_proxy.http_method
integration_http_method = "POST" integration_http_method = "POST"
type = "AWS_PROXY" type = "AWS_PROXY"
uri = aws_lambda_function.lambda_func.invoke_arn uri = aws_lambda_function.lambda_func.invoke_arn
} }
resource "aws_api_gateway_deployment" "lambda" { resource "aws_api_gateway_deployment" "lambda" {
depends_on = [ depends_on = [
aws_api_gateway_integration.lambda aws_api_gateway_integration.lambda
] ]
rest_api_id = aws_api_gateway_rest_api.gateway.id rest_api_id = aws_api_gateway_rest_api.gateway.id
stage_name = "test" stage_name = "test"
} }
resource "aws_lambda_permission" "apigw" { resource "aws_lambda_permission" "apigw" {
statement_id = "AllowAPIGatewayInvoke" statement_id = "AllowAPIGatewayInvoke"
action = "lambda:InvokeFunction" action = "lambda:InvokeFunction"
function_name = aws_lambda_function.lambda_func.function_name function_name = aws_lambda_function.lambda_func.function_name
principal = "apigateway.amazonaws.com" principal = "apigateway.amazonaws.com"
source_arn = "${aws_api_gateway_rest_api.gateway.execution_arn}/*/*" source_arn = "${aws_api_gateway_rest_api.gateway.execution_arn}/*/*"
} }
output "base_url" { output "base_url" {
value = aws_api_gateway_deployment.lambda.invoke_url value = aws_api_gateway_deployment.lambda.invoke_url
} }

View file

@ -1,11 +1,11 @@
variable "aws_region" { variable "aws_region" {
type = string type = string
} }
variable "artifacts_bucket_name" { variable "artifacts_bucket_name" {
type = string type = string
} }
variable "lambda_archive_name" { variable "lambda_archive_name" {
type = string type = string
} }

View file

@ -1,3 +1,3 @@
output "artifacts_bucket_name" { output "artifacts_bucket_name" {
value = aws_s3_bucket.artifacts.bucket value = aws_s3_bucket.artifacts.bucket
} }

View file

@ -1,5 +1,13 @@
provider aws { terraform {
profile = "default" required_version = ">=1.0"
region = var.aws_region
required_providers {
aws = "4.29.0"
}
}
provider "aws" {
profile = "default"
region = var.aws_region
} }

View file

@ -1,8 +1,8 @@
resource "aws_s3_bucket" "artifacts" { resource "aws_s3_bucket" "artifacts" {
bucket = var.artifacts_bucket_name bucket = var.artifacts_bucket_name
acl = "private" acl = "private"
tags = { tags = {
Name = var.artifacts_bucket_name Name = var.artifacts_bucket_name
} }
} }

View file

@ -1,7 +1,7 @@
variable "artifacts_bucket_name" { variable "artifacts_bucket_name" {
type = string type = string
} }
variable "aws_region" { variable "aws_region" {
type = string type = string
} }

View file

View file

@ -1,6 +0,0 @@
#
# This file is autogenerated by pip-compile with python 3.8
# To update, run:
#
# pip-compile requirements.in
#

View file

@ -1,6 +0,0 @@
boto3
invoke
black
pytest
moto
pytest-cov

View file

@ -1,192 +0,0 @@
#
# This file is autogenerated by pip-compile with python 3.8
# To update, run:
#
# pip-compile --output-file=requirements_dev.txt requirements_dev.in
#
appdirs==1.4.4
# via black
attrs==20.3.0
# via
# jsonschema
# pytest
aws-sam-translator==1.33.0
# via cfn-lint
aws-xray-sdk==2.6.0
# via moto
black==20.8b1
# via -r requirements_dev.in
boto==2.49.0
# via moto
boto3==1.24.66
# via
# -r requirements_dev.in
# aws-sam-translator
# moto
botocore==1.27.66
# via
# aws-xray-sdk
# boto3
# moto
# s3transfer
certifi==2020.12.5
# via requests
cffi==1.14.4
# via cryptography
cfn-lint==0.44.4
# via moto
chardet==4.0.0
# via requests
click==7.1.2
# via black
coverage==5.3.1
# via pytest-cov
cryptography==3.3.1
# via
# moto
# python-jose
# sshpubkeys
decorator==4.4.2
# via networkx
docker==4.4.1
# via moto
ecdsa==0.14.1
# via
# moto
# python-jose
# sshpubkeys
future==0.18.2
# via aws-xray-sdk
idna==2.10
# via
# moto
# requests
iniconfig==1.1.1
# via pytest
invoke==1.5.0
# via -r requirements_dev.in
jinja2==2.11.2
# via moto
jmespath==0.10.0
# via
# boto3
# botocore
jsondiff==1.2.0
# via moto
jsonpatch==1.28
# via cfn-lint
jsonpickle==1.5.0
# via aws-xray-sdk
jsonpointer==2.0
# via jsonpatch
jsonschema==3.2.0
# via
# aws-sam-translator
# cfn-lint
junit-xml==1.9
# via cfn-lint
markupsafe==1.1.1
# via
# jinja2
# moto
mock==4.0.3
# via moto
more-itertools==8.6.0
# via moto
moto==1.3.16
# via -r requirements_dev.in
mypy-extensions==0.4.3
# via black
networkx==2.5
# via cfn-lint
packaging==20.8
# via pytest
pathspec==0.8.1
# via black
pluggy==0.13.1
# via pytest
py==1.10.0
# via pytest
pyasn1==0.4.8
# via
# python-jose
# rsa
pycparser==2.20
# via cffi
pyparsing==2.4.7
# via packaging
pyrsistent==0.17.3
# via jsonschema
pytest==6.2.1
# via
# -r requirements_dev.in
# pytest-cov
pytest-cov==2.11.1
# via -r requirements_dev.in
python-dateutil==2.8.1
# via
# botocore
# moto
python-jose[cryptography]==3.2.0
# via moto
pytz==2020.5
# via moto
pyyaml==5.4.1
# via
# cfn-lint
# moto
regex==2020.11.13
# via black
requests==2.25.1
# via
# docker
# moto
# responses
responses==0.12.1
# via moto
rsa==4.7
# via python-jose
s3transfer==0.6.0
# via boto3
six==1.15.0
# via
# aws-sam-translator
# cfn-lint
# cryptography
# docker
# ecdsa
# jsonschema
# junit-xml
# moto
# python-dateutil
# python-jose
# responses
# websocket-client
sshpubkeys==3.1.0
# via moto
toml==0.10.2
# via
# black
# pytest
typed-ast==1.4.2
# via black
typing-extensions==3.7.4.3
# via black
urllib3==1.26.2
# via
# botocore
# requests
# responses
websocket-client==0.57.0
# via docker
werkzeug==1.0.1
# via moto
wrapt==1.12.1
# via aws-xray-sdk
xmltodict==0.12.0
# via moto
zipp==3.4.0
# via moto
# The following packages are considered to be unsafe in a requirements file:
# setuptools

31
script/apply Normal file
View file

@ -0,0 +1,31 @@
#!/usr/bin/bash
# Applies infrastructure changes for the given project.
#
# The project name is expected to be passed as an environment variable,
# i.e. PROJECT=app . script/apply
(
    source "$(dirname "$0")/../.config"

    PROJECT_ROOT=$(realpath "$BOOTSTRAP_ROOT/infrastructure/$PROJECT")
    cd "$PROJECT_ROOT" || exit 1

    terraform init

    # Some resources are always marked as tainted to force their recreation.
    # NOTE(review): main.tf declares aws_lambda_function.lambda_func; the
    # previously listed "apgnd_lambda_func" matched nothing and
    # --allow-missing hid the mismatch — confirm the intended resource name.
    declare -a ALWAYS_TAINT_RESOURCES=(
        "aws_lambda_function.lambda_func"
        "aws_lambda_permission.apigw"
    )

    # "${ARR[@]}" expands every element; the previous unquoted $ARR
    # expanded only the first element of the array.
    for RESOURCE in "${ALWAYS_TAINT_RESOURCES[@]}"
    do
        terraform taint --allow-missing "$RESOURCE"
    done

    terraform apply --var-file "$VARIABLES_PATH"
)

View file

@ -1,22 +1,6 @@
if [ "$CI" -eq 1 ]; then #!/usr/bin/bash
{
python -m pip install -U pip pip-tools &&
pip install -r requirements_dev.txt
}
else
VENV=lambda-boilerplate.venv
################################################################# TFLINT_INSTALL_PATH=./.tflint
# Bootstrapping sets up the Python 3.8 venv that allows the use #
# of the invoke commands. #
#################################################################
test -d $VENV || python3 -m venv $VENV || return
. $VENV/bin/activate
python -m pip install -U pip pip-tools --no-cache-dir
python -m pip install -r requirements.txt
python -m pip install -r requirements_dev.txt
echo "✨ Good to go! ✨"
fi
# Set up Terraform tooling
curl -s https://raw.githubusercontent.com/terraform-linters/tflint/master/install_linux.sh | TFLINT_INSTALL_PATH=$TFLINT_INSTALL_PATH bash

17
script/destroy Normal file
View file

@ -0,0 +1,17 @@
#!/usr/bin/bash
# Destroys resources for the given project.
#
# The project name is expected to be passed as an environment variable,
# i.e. PROJECT=app . script/destroy
(
    source "$(dirname "$0")/../.config"

    # Resolve infrastructure/<project> before handing control to terraform;
    # the guard avoids running `terraform destroy` in the wrong directory
    # when realpath fails.
    PROJECT_ROOT=$(realpath "$BOOTSTRAP_ROOT/infrastructure/$PROJECT")
    cd "$PROJECT_ROOT" || exit 1

    terraform init
    terraform destroy --var-file "$VARIABLES_PATH"
)

19
script/invoke Normal file
View file

@ -0,0 +1,19 @@
#!/usr/bin/bash
# Triggers the lambda with a given payload.
#
# The payload is provided as a JSON blob through the PAYLOAD argument,
# the FUNCTION_NAME should also be provided.
#
# PAYLOAD=<json payload> FUNCTION_NAME=<function-name> . script/invoke
(
    source "$(dirname "$0")/../.config"

    # Fixes vs. previous revision: the command was split across two lines
    # with no continuation, carried a stray closing quote, expanded the
    # undefined $FUNCTION_NAME_out variable instead of ${FUNCTION_NAME}_out,
    # had an unbalanced extra ")", and cd'd to the undefined $PROJECT_ROOT.
    # The invocation does not depend on the working directory.
    aws lambda invoke \
        --endpoint http://localhost:9001 \
        --no-sign-request \
        --function-name "$FUNCTION_NAME" \
        --log-type Tail \
        --payload "$PAYLOAD" \
        "${FUNCTION_NAME}_out.json"
)

17
script/plan Normal file
View file

@ -0,0 +1,17 @@
#!/usr/bin/bash
# Generates a plan for the given project.
#
# The project name is expected to be passed as an environment variable,
# i.e. PROJECT=app . script/plan
(
    source "$(dirname "$0")/../.config"

    # Quoted expansions keep paths with spaces intact; the cd guard stops
    # terraform from running in the wrong directory if resolution fails.
    PROJECT_ROOT=$(realpath "$BOOTSTRAP_ROOT/infrastructure/$PROJECT")
    cd "$PROJECT_ROOT" || exit 1

    terraform init
    terraform plan --var-file "$VARIABLES_PATH"
)

20
script/push Normal file
View file

@ -0,0 +1,20 @@
#!/usr/bin/bash
# Pushes the lambda archive to S3.
#
# This is a pre-condition to script/apply since creating
# the Lambda function will expect the source code to be
# available in the artifacts bucket.
#
# The archive path is expected to be passed as an environment variable,
# i.e. ARCHIVE=<path-to-archive> . script/push
(
    source "$(dirname "$0")/../.config"

    # Read the artifacts bucket name from the bootstrap project's
    # terraform outputs (requires jq).
    BOOTSTRAP_PROJECT=$(realpath "$BOOTSTRAP_ROOT/infrastructure/bootstrap")
    cd "$BOOTSTRAP_PROJECT" || exit 1
    BUCKET_NAME=$(terraform output --json | jq .artifacts_bucket_name.value -r)
    # Return to the caller's directory so a relative ARCHIVE still resolves.
    cd - > /dev/null

    aws s3 cp "$ARCHIVE" "s3://$BUCKET_NAME"
)

11
script/start Normal file
View file

@ -0,0 +1,11 @@
#!/usr/bin/bash
# Starts a local instance of the lambda.
(
    source "$(dirname "$0")/../.config"

    # NOTE(review): .config never defines PROJECT_ROOT, so the previous
    # `cd $PROJECT_ROOT` silently changed to $HOME. The compose file
    # presumably lives at the repository root ($BOOTSTRAP_ROOT) — confirm.
    cd "$BOOTSTRAP_ROOT" || exit 1
    docker-compose up -d --build
)

11
script/stop Normal file
View file

@ -0,0 +1,11 @@
#!/usr/bin/bash
# Stops a running local instance of the lambda.
(
    source "$(dirname "$0")/../.config"

    # NOTE(review): .config never defines PROJECT_ROOT, so the previous
    # `cd $PROJECT_ROOT` silently changed to $HOME. Use the repository
    # root ($BOOTSTRAP_ROOT) where the compose file presumably lives — confirm.
    cd "$BOOTSTRAP_ROOT" || exit 1
    docker-compose down
)

19
script/terraform-lint Normal file
View file

@ -0,0 +1,19 @@
#!/bin/bash
# Lints and formats the terraform files of every project.
(
    # tflint is installed under ./.tflint by script/bootstrap.
    TFLINT_PATH=$(realpath ./.tflint/tflint)

    for PROJECT in "bootstrap" "app"
    do
        # Subshell so each `cd` is scoped to one project.
        (
            PROJECT_ROOT=./infrastructure/$PROJECT
            echo ">>> Linting $PROJECT_ROOT"
            cd "$PROJECT_ROOT" || exit 1

            # tflint needs an initialized working directory.
            terraform init
            "$TFLINT_PATH" # --var-file ../variables.tfvars
            terraform fmt -write=true
        )
    done
)

218
tasks.py
View file

@ -1,218 +0,0 @@
from invoke import task, Collection
import boto3
import os
from pathlib import Path
import hashlib
import re
from typing import List, Dict
BASE_PATH = str(Path(__file__).parent.absolute())
VARIABLES_PATH = "../variables.tfvars"
HELP_SEGMENTS = {
"project": "Project name, either app or bootstrap",
"archive": "Archive file",
"function_name": "Name of the Lambda to invoke locally (as defined in the Cloudformation template)",
"payload": "JSON payload to include in the trigger event",
"fix": "Whether to fix errors",
"env": "Environment (dev or prod)",
"package": "Target package (incl. range)",
}
def _compose_path(path: str) -> str:
    """Resolve *path* relative to the repository root, as an absolute string."""
    absolute = (Path(BASE_PATH) / path).absolute()
    return str(absolute)
def _build_help_dict(segments: List[str]) -> Dict[str, str]:
    """Map each requested segment name to its help text from HELP_SEGMENTS."""
    help_entries: Dict[str, str] = {}
    for name in segments:
        help_entries[name] = HELP_SEGMENTS[name]
    return help_entries
PROJECT_PATHS = {
"app": _compose_path("infrastructure/app"),
"bootstrap": _compose_path("infrastructure/bootstrap"),
}
#####################
# Cloud invocations #
#####################
@task(name="plan", help=_build_help_dict(["project"]))
def cloud_plan(ctx, project):
    """
    Builds the Terraform plan for the given project.
    """
    project_dir = PROJECT_PATHS[project]
    with ctx.cd(project_dir):
        # init first so providers/modules are present before planning.
        ctx.run("terraform init")
        ctx.run(f"terraform plan --var-file {VARIABLES_PATH}")
@task(name="apply", help=_build_help_dict(["project"]))
def cloud_apply(ctx, project):
    """
    Applies infrastructure changes to the given project.

    The lambda function and its API-gateway permission are tainted first
    so terraform always recreates them.
    """
    with ctx.cd(PROJECT_PATHS[project]):
        ctx.run("terraform init")
        # NOTE(review): main.tf declares aws_lambda_function.lambda_func;
        # the previously tainted "apgnd_lambda_func" did not exist and
        # --allow-missing made the mismatch silent — confirm intended name.
        ctx.run("terraform taint --allow-missing aws_lambda_function.lambda_func")
        ctx.run("terraform taint --allow-missing aws_lambda_permission.apigw")
        ctx.run(f"terraform apply --var-file {VARIABLES_PATH}")
@task(name="destroy", help=_build_help_dict(["project"]))
def cloud_destroy(ctx, project):
    """
    Destroys resources associated with the given project.
    """
    project_dir = PROJECT_PATHS[project]
    commands = (
        "terraform init",
        f"terraform destroy --var-file {VARIABLES_PATH}",
    )
    with ctx.cd(project_dir):
        for command in commands:
            ctx.run(command)
@task(name="pack")
def cloud_pack(ctx):
    """
    Prepares and packages the source code for lambdas.
    """
    # Vendor runtime dependencies and zip the handler sources.
    with ctx.cd(BASE_PATH):
        for command in (
            "pip install -r requirements.txt --target package/",
            "zip -r lambda_function.zip src/*",
        ):
            ctx.run(command)
    # Append the vendored packages to the same archive.
    package_dir = _compose_path("package")
    with ctx.cd(package_dir):
        ctx.run("zip -r ../lambda_function.zip ./")
@task(name="push", help=_build_help_dict(["archive"]))
def cloud_push(ctx, archive):
    """
    Pushes the given archive to S3.

    Reads the artifacts bucket name from the bootstrap project's
    ``terraform output`` and uploads *archive* into that bucket.
    """
    artifacts_bucket = None
    with ctx.cd(_compose_path(PROJECT_PATHS["bootstrap"])):
        # hide="out" keeps terraform's output off the console; it is only
        # needed programmatically.
        out = ctx.run("terraform output", hide="out").stdout
        # Matches a line like: artifacts_bucket_name = "my-bucket-123"
        artifacts_bucket_match = re.match(
            'artifacts_bucket_name = "(?P<bucket_name>[0-9a-zA-Z-_]+)"\n?', out
        )
        # NOTE(review): raises AttributeError when the output line is
        # missing (match is None) — confirm that failure mode is acceptable.
        artifacts_bucket = artifacts_bucket_match.group("bucket_name")
    with ctx.cd(BASE_PATH):
        ctx.run(f"aws s3 cp {archive} s3://{artifacts_bucket}", hide="out")
    print(f"Uploaded {archive} to s3 ({artifacts_bucket})!")
#####################
# Local invocations #
#####################
@task(name="start")
def local_start(ctx):
    """
    Starts your stack locally.
    """
    # Rebuild images and bring the stack up detached.
    command = "docker-compose up -d --build"
    ctx.run(command)
@task(name="stop")
def local_stop(ctx):
    """
    Stops your local stack.
    """
    # Tear the compose stack down.
    command = "docker-compose down"
    ctx.run(command)
@task(
    name="invoke",
    help=_build_help_dict(["function_name", "payload"]),
)
def local_invoke(ctx, function_name, payload):
    """
    Triggers the local lambda with the given payload
    """
    # Assemble the CLI call piecewise; the endpoint targets the local
    # lambda runtime, and the response is written to <function>_out.json.
    command = (
        "aws lambda invoke --endpoint http://localhost:9001 "
        "--no-sign-request "
        f"--function-name {function_name} "
        "--log-type Tail "
        f"--payload {payload} {function_name}_out.json"
    )
    ctx.run(command)
#####################
# Other invocations #
####################
@task(name="lock")
def lock_requirements(ctx):
    """
    Builds the pip lockfile
    """
    # Compile both lockfiles; hide="both" silences pip-tools' output.
    commands = (
        "python -m piptools compile requirements.in",
        "python -m piptools compile requirements_dev.in --output-file requirements_dev.txt",
    )
    with ctx.cd(BASE_PATH):
        for command in commands:
            ctx.run(command, hide="both")
@task(name="update", help=_build_help_dict(["env", "package"]))
def update_requirements(ctx, env, package):
    """
    Updates a package and regenerates the lockfiles.

    :param env: target environment, ``"dev"`` or ``"prod"``.
    :param package: package spec to upgrade (incl. range).
    :raises ValueError: if *env* is not ``"dev"`` or ``"prod"``.
    """
    # Dispatch table replaces the if/elif chain; unknown envs still raise.
    env_to_deps = {"prod": "requirements.in", "dev": "requirements_dev.in"}
    if env not in env_to_deps:
        raise ValueError("Invalid env")
    deps = env_to_deps[env]
    with ctx.cd(BASE_PATH):
        ctx.run(f"python -m piptools compile {deps} --upgrade-package {package}")
@task(name="lint", help=_build_help_dict(["fix"]))
def lint(ctx, fix=False):
    """
    Lints
    """
    # Without --fix, black runs in --check mode (report only, no rewrite).
    check_flag = "" if fix else " --check"
    with ctx.cd(BASE_PATH):
        ctx.run(f"black *.py **/*.py{check_flag}")
@task(name="test")
def test(ctx):
    """
    Runs tests
    """
    # Run pytest with coverage collected over the lambda sources.
    command = "pytest --cov=src"
    with ctx.cd(BASE_PATH):
        ctx.run(command)
# Root task namespace exposed to `inv`.
ns = Collection()

# `inv local.*` — start/stop/trigger the dockerized lambda.
local = Collection("local")
local.add_task(local_start)
local.add_task(local_stop)
local.add_task(local_invoke)

# `inv cloud.*` — terraform plan/apply/destroy and artifact packaging/upload.
cloud = Collection("cloud")
cloud.add_task(cloud_plan)
cloud.add_task(cloud_apply)
cloud.add_task(cloud_destroy)
cloud.add_task(cloud_pack)
cloud.add_task(cloud_push)

# `inv requirements.*` — pip-tools lockfile management.
project = Collection("requirements")
project.add_task(lock_requirements)
project.add_task(update_requirements)

ns.add_collection(local)
ns.add_collection(cloud)
ns.add_collection(project)

# Top-level tasks: `inv lint`, `inv test`.
ns.add_task(lint)
ns.add_task(test)