2020-09-26 05:14:56 +00:00
|
|
|
from invoke import task, Collection
|
|
|
|
import boto3
|
|
|
|
import os
|
|
|
|
from pathlib import Path
|
|
|
|
import hashlib
|
2021-03-19 17:38:54 +00:00
|
|
|
import re
|
|
|
|
from typing import List, Dict
|
|
|
|
|
|
|
|
# Absolute path of the directory containing this tasks file; every other
# path in this module is resolved relative to it.
BASE_PATH = str(Path(__file__).parent.absolute())

# Terraform variables file, relative to each project's infrastructure
# directory (see PROJECT_PATHS below).
VARIABLES_PATH = "../variables.tfvars"
|
|
|
|
|
|
|
|
# Shared help strings for task arguments; tasks pick the entries they need
# via _build_help_dict so the same flag is described identically everywhere.
HELP_SEGMENTS = {
    "project": "Project name, either app or bootstrap",
    "archive": "Archive file",
    "function_name": "Name of the Lambda to invoke locally (as defined in the Cloudformation template)",
    "payload": "JSON payload to include in the trigger event",
    "fix": "Whether to fix errors",
    "env": "Environment (dev or prod)",
    "package": "Target package (incl. range)",
}
|
|
|
|
|
|
|
|
|
|
|
|
def _compose_path(path: str) -> str:
    """Resolve *path* against this file's directory and return it as an absolute path string."""
    resolved = Path(BASE_PATH, path).absolute()
    return str(resolved)
|
|
|
|
|
|
|
|
|
|
|
|
def _build_help_dict(segments: List[str]) -> Dict[str, str]:
    """Select the HELP_SEGMENTS entries named in *segments*, as a help dict for @task."""
    help_dict = {}
    for segment in segments:
        help_dict[segment] = HELP_SEGMENTS[segment]
    return help_dict
|
|
|
|
|
|
|
|
|
|
|
|
# Maps the `project` CLI argument to the Terraform working directory for
# that project (see HELP_SEGMENTS["project"]).
PROJECT_PATHS = {
    "app": _compose_path("infrastructure/app"),
    "bootstrap": _compose_path("infrastructure/bootstrap"),
}
|
2020-09-26 05:14:56 +00:00
|
|
|
|
|
|
|
#####################
|
2021-03-19 17:38:54 +00:00
|
|
|
# Cloud invocations #
|
2020-09-26 05:14:56 +00:00
|
|
|
#####################
|
|
|
|
|
|
|
|
|
2021-03-19 17:38:54 +00:00
|
|
|
@task(name="plan", help=_build_help_dict(["project"]))
def cloud_plan(ctx, project):
    """
    Builds the Terraform plan for the given project.
    """
    project_dir = PROJECT_PATHS[project]
    with ctx.cd(project_dir):
        # init is idempotent; running it first keeps the task usable on a fresh checkout
        ctx.run("terraform init")
        ctx.run(f"terraform plan --var-file {VARIABLES_PATH}")
|
2020-09-26 05:14:56 +00:00
|
|
|
|
2020-09-26 15:40:18 +00:00
|
|
|
|
2021-03-19 17:38:54 +00:00
|
|
|
@task(name="apply", help=_build_help_dict(["project"]))
def cloud_apply(ctx, project):
    """
    Applies infrastructure changes to the given project.
    """
    with ctx.cd(PROJECT_PATHS[project]):
        ctx.run("terraform init")
        # Taint the lambda and its API Gateway permission so terraform
        # recreates them on apply (presumably to force a code redeploy —
        # confirm). --allow-missing keeps this from failing on a fresh stack.
        tainted_resources = (
            "aws_lambda_function.apgnd_lambda_func",
            "aws_lambda_permission.apigw",
        )
        for resource in tainted_resources:
            ctx.run(f"terraform taint --allow-missing {resource}")
        ctx.run(f"terraform apply --var-file {VARIABLES_PATH}")
|
2020-09-26 05:14:56 +00:00
|
|
|
|
|
|
|
|
2021-03-19 17:38:54 +00:00
|
|
|
@task(name="destroy", help=_build_help_dict(["project"]))
def cloud_destroy(ctx, project):
    """
    Destroys resources associated with the given project.
    """
    target_dir = PROJECT_PATHS[project]
    with ctx.cd(target_dir):
        # init first so the destroy works even on a fresh checkout
        ctx.run("terraform init")
        ctx.run(f"terraform destroy --var-file {VARIABLES_PATH}")
|
2020-09-26 05:14:56 +00:00
|
|
|
|
|
|
|
|
2021-03-19 17:38:54 +00:00
|
|
|
@task(name="pack")
def cloud_pack(ctx):
    """
    Prepares and packages the source code for lambdas.
    """
    # Vendor the dependencies into package/ and zip the sources first.
    with ctx.cd(BASE_PATH):
        ctx.run("pip install -r requirements.txt --target package/")
        ctx.run("zip -r lambda_function.zip src/*")

    # Then append the vendored dependencies at the root of the same archive.
    with ctx.cd(_compose_path("package")):
        ctx.run("zip -r ../lambda_function.zip ./")
|
|
|
|
|
|
|
|
|
|
|
|
@task(name="push", help=_build_help_dict(["archive"]))
def cloud_push(ctx, archive):
    """
    Pushes the given archive to S3.

    Reads the artifacts bucket name from the bootstrap project's Terraform
    outputs, then uploads the archive there with the AWS CLI.

    Raises:
        RuntimeError: if the artifacts bucket name cannot be found in the
            Terraform output.
    """
    with ctx.cd(_compose_path(PROJECT_PATHS["bootstrap"])):
        out = ctx.run("terraform output", hide="out").stdout
        # Use re.search, not re.match: re.match only matches at the start of
        # the string, so the bucket would only be found if it happened to be
        # the first terraform output line.
        artifacts_bucket_match = re.search(
            'artifacts_bucket_name = "(?P<bucket_name>[0-9a-zA-Z-_]+)"\n?', out
        )
        # Fail with a clear message instead of an AttributeError on .group()
        # when the output doesn't contain the bucket (e.g. bootstrap not applied).
        if artifacts_bucket_match is None:
            raise RuntimeError(
                "Could not find artifacts_bucket_name in terraform output"
            )
        artifacts_bucket = artifacts_bucket_match.group("bucket_name")

    with ctx.cd(BASE_PATH):
        ctx.run(f"aws s3 cp {archive} s3://{artifacts_bucket}", hide="out")

    print(f"Uploaded {archive} to s3 ({artifacts_bucket})!")
|
2020-09-26 05:14:56 +00:00
|
|
|
|
|
|
|
|
|
|
|
#####################
|
|
|
|
# Local invocations #
|
|
|
|
#####################
|
|
|
|
|
|
|
|
|
|
|
|
@task(name="start")
def local_start(ctx):
    """
    Starts your stack locally.
    """
    # --build rebuilds the images so local code changes are picked up;
    # -d detaches so the task returns immediately.
    ctx.run("docker-compose up -d --build")
|
|
|
|
|
|
|
|
|
|
|
|
@task(name="stop")
def local_stop(ctx):
    """
    Stops your local stack.
    """
    # Tears down the containers started by `local.start`.
    ctx.run("docker-compose down")
|
|
|
|
|
|
|
|
|
|
|
|
@task(
    name="invoke",
    help=_build_help_dict(["function_name", "payload"]),
)
def local_invoke(ctx, function_name, payload):
    """
    Triggers the local lambda with the given payload
    """
    # Targets the lambda emulator exposed by the local stack on port 9001;
    # the response body is written to <function_name>_out.json.
    # NOTE(review): payload is interpolated unquoted into a shell command —
    # callers must pre-quote JSON payloads; consider shlex.quote here.
    command = f"aws lambda invoke --endpoint http://localhost:9001 --no-sign-request --function-name {function_name} --log-type Tail --payload {payload} {function_name}_out.json"
    ctx.run(command)
|
|
|
|
|
|
|
|
|
2021-03-19 17:38:54 +00:00
|
|
|
#####################
|
|
|
|
# Other invocations #
|
|
|
|
#####################
|
|
|
|
|
|
|
|
|
|
|
|
@task(name="lock")
def lock_requirements(ctx):
    """
    Builds the pip lockfile
    """
    compile_commands = (
        "python -m piptools compile requirements.in",
        "python -m piptools compile requirements_dev.in --output-file requirements_dev.txt",
    )
    with ctx.cd(BASE_PATH):
        for command in compile_commands:
            # hide="both" keeps pip-tools' verbose resolver output off the console
            ctx.run(command, hide="both")
|
|
|
|
|
|
|
|
|
|
|
|
@task(name="update", help=_build_help_dict(["env", "package"]))
def update_requirements(ctx, env, package):
    """
    Updates a package and regenerates the lockfiles.
    """
    # Each environment has its own requirements source file.
    requirement_sources = {
        "prod": "requirements.in",
        "dev": "requirements_dev.in",
    }
    deps = requirement_sources.get(env)
    if deps is None:
        raise ValueError("Invalid env")

    with ctx.cd(BASE_PATH):
        ctx.run(f"python -m piptools compile {deps} --upgrade-package {package}")
|
|
|
|
|
|
|
|
|
|
|
|
@task(name="lint", help=_build_help_dict(["fix"]))
def lint(ctx, fix=False):
    """
    Lints
    """
    # Without --fix, run black in check-only mode (reports but doesn't rewrite).
    command = "black *.py **/*.py"
    if not fix:
        command += " --check"
    with ctx.cd(BASE_PATH):
        ctx.run(command)
|
|
|
|
|
|
|
|
|
|
|
|
@task(name="test")
def test(ctx):
    """
    Runs tests
    """
    with ctx.cd(BASE_PATH):
        # Collect coverage for the src package alongside the test run.
        ctx.run("pytest --cov=src")
|
|
|
|
|
|
|
|
|
2020-09-26 05:14:56 +00:00
|
|
|
# Assemble the invoke namespace: sub-collections for local/cloud/requirements
# tasks, plus lint and test at the top level.
ns = Collection()

local = Collection("local")
for local_task in (local_start, local_stop, local_invoke):
    local.add_task(local_task)

cloud = Collection("cloud")
for cloud_task in (cloud_plan, cloud_apply, cloud_destroy, cloud_pack, cloud_push):
    cloud.add_task(cloud_task)

project = Collection("requirements")
for requirements_task in (lock_requirements, update_requirements):
    project.add_task(requirements_task)

for sub_collection in (local, cloud, project):
    ns.add_collection(sub_collection)

ns.add_task(lint)
ns.add_task(test)
|