feat: volume backup script

Marc 2024-07-06 15:52:06 -04:00
parent 3c1e5545a9
commit 82e15d0697
Signed by: marc
GPG key ID: 048E042F22B5DC79
7 changed files with 147 additions and 1 deletion

3
.gitignore vendored

@@ -1,2 +1,5 @@
 *.env
 .services
+config.json
+*.egg*
+*.venv

7
README.md

@@ -1,6 +1,11 @@
 # scripts
-_Quick and dirty utilities to build, start and stop services._
+_Utilities to manage the machine Spadinastan runs on._
+
+## Setup
+Python dependencies associated with the scripts can be installed via `./bootstrap.sh`, which provides a venv to run the
+scripts in.
+
 ## Usage
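
The `## Usage` section is still empty in this commit. As a sketch, a minimal end-to-end run might look like the following; `config.json` is an assumed name for a file matching the script's Config model:

./bootstrap.sh
. scripts.venv/bin/activate
python backup-podman-volumes.py config.json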

78
backup-podman-volumes.py Normal file

@@ -0,0 +1,78 @@
"""
backup-podman-volumes
This script exports volumes defined in the configuration to
S3 and to a local path.
Usage: python <path>/backup-podman-volumes.py <config-path>
"""
import json
import logging
import pathlib
import shutil
import subprocess
import sys

import boto3
import pydantic

logger = logging.getLogger(__file__)
logging.basicConfig(level=logging.INFO)


class Config(pydantic.BaseModel):
    # List of volumes to back up.
    volumes: list[str]
    # Local directory to save a copy of the latest volume to.
    local_backup_path: str
    # Bucket name to push the backed-up archive to.
    bucket_name: str
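
# For reference, a config.json matching this model might look like
# the following (all values hypothetical):
# {
#     "volumes": ["nextcloud_data", "gitea_data"],
#     "local_backup_path": "/mnt/backups",
#     "bucket_name": "spadinastan-volume-backups"
# }
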
def export_volume(volume_name: str, destination: pathlib.Path) -> None:
    """
    Exports a Podman volume identified by <volume_name> as a file
    located at <destination>.
    """
    # Argument-list form avoids shell quoting issues with volume names.
    subprocess.run(
        ["podman", "volume", "export", volume_name, "-o", str(destination)],
        check=True,
    )
    logger.info(f"Exported {volume_name} to {destination}.")


def push_to_s3(source: pathlib.Path, bucket: str) -> None:
    """
    Pushes the given file to S3.

    The file name is reused as the object name, and the file
    located at <source> is pushed to bucket <bucket> under that
    name.
    """
    s3 = boto3.client("s3")
    object_name = source.name
    s3.upload_file(str(source), bucket, object_name)
    logger.info(f"Pushed {source} to {bucket} as {object_name}")
if __name__ == "__main__":
    if len(sys.argv) != 2:
        raise RuntimeError("Usage: backup-podman-volumes.py <config-path>")
    config_path = pathlib.Path(sys.argv[1])
    if not config_path.exists():
        raise RuntimeError(f"Did not find configuration at {config_path}")
    config = Config.model_validate(json.loads(config_path.read_text()))
    # Stage each export in /tmp, then push it to S3 and copy it locally.
    for volume in config.volumes:
        exported_fn = f"{volume}.tar"
        exported_path = pathlib.Path("/tmp", exported_fn)
        export_volume(volume, exported_path)
        local_backup_path = pathlib.Path(config.local_backup_path, exported_fn)
        push_to_s3(exported_path, config.bucket_name)
        shutil.copy(exported_path, local_backup_path)
        logger.info(f"Backed up {volume} to S3 and local.")

15
bootstrap.sh Normal file

@@ -0,0 +1,15 @@
#!/bin/bash
set -e  # abort on the first failing step

# Create and enter the venv the scripts run in.
python -m venv scripts.venv
. scripts.venv/bin/activate

python -m pip install pip~=24.0 pip-tools~=7.4

# pip-sync needs a lock file to exist, even an empty one.
if [[ ! -f requirements.txt ]]; then
    touch requirements.txt
fi

pip-sync requirements.txt
pip install -e .

3
lock-deps.sh Executable file

@@ -0,0 +1,3 @@
#!/bin/bash
pip-compile -o requirements.txt pyproject.toml
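
Together with `bootstrap.sh`, this completes the usual pip-tools loop. A dependency update would look something like the following sketch:

# edit pyproject.toml, then re-pin and sync the venv:
./lock-deps.sh
pip-sync requirements.txt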

8
pyproject.toml Normal file

@@ -0,0 +1,8 @@
[project]
name = "scripts"
version = "0"
dependencies = [
    "boto3",
    "pydantic",
]
requires-python = "~= 3.12"

34
requirements.txt Normal file

@@ -0,0 +1,34 @@
#
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --output-file=requirements.txt pyproject.toml
#
annotated-types==0.7.0
    # via pydantic
boto3==1.34.140
    # via scripts (pyproject.toml)
botocore==1.34.140
    # via
    #   boto3
    #   s3transfer
jmespath==1.0.1
    # via
    #   boto3
    #   botocore
pydantic==2.8.2
    # via scripts (pyproject.toml)
pydantic-core==2.20.1
    # via pydantic
python-dateutil==2.9.0.post0
    # via botocore
s3transfer==0.10.2
    # via boto3
six==1.16.0
    # via python-dateutil
typing-extensions==4.12.2
    # via
    #   pydantic
    #   pydantic-core
urllib3==2.2.2
    # via botocore