"""
backup-podman-volumes

This script exports volumes defined in the configuration to
S3 and to a local path.

Usage: python <path>/backup-podman-volumes.py <config-path>
"""
|
|
# /// script
# dependencies = [
#     "pydantic",
#     "boto3"
# ]
# ///
|
|
import json
import logging
import pathlib
import shutil
import subprocess
import sys

import boto3
import pydantic
|
# Use the module name rather than the file path as the logger name, per
# the standard logging convention (getLogger(__name__)); __file__ would
# embed an absolute path in every log record's logger field.
logger = logging.getLogger(__name__)
logging.basicConfig(
    # NOTE(review): writing under /var/log usually requires root —
    # confirm the account running this script can create this file.
    filename="/var/log/backup-volumes.log", encoding="utf-8", level=logging.INFO
)
|
class Config(pydantic.BaseModel):
    """Schema of the JSON configuration file passed on the command line."""

    # List of volumes to back up.
    volumes: list[str]
    # Local directory to save a copy of the latest volume to.
    local_backup_path: str
    # Bucket name to push the backed up archive to.
    bucket_name: str
|
def export_volume(volume_name: str, destination: pathlib.Path) -> str:
    """
    Export the Podman volume <volume_name> as a tar archive at <destination>.

    Returns the destination path as a string.

    Raises subprocess.CalledProcessError if `podman volume export` exits
    non-zero (check=True).
    """
    # Pass arguments as a list with shell=False so a volume name or path
    # containing spaces or shell metacharacters cannot be interpreted by
    # the shell (the original f-string + shell=True was injection-prone).
    subprocess.run(
        ["podman", "volume", "export", volume_name, "-o", str(destination)],
        check=True,
    )

    logger.info("Exported %s to %s.", volume_name, destination)
    # The signature promised -> str but the original returned None;
    # return the path so the annotation is honest (callers that ignore
    # the return value are unaffected).
    return str(destination)
|
def push_to_s3(source: pathlib.Path, bucket: str):
    """
    Upload the file at <source> to the S3 bucket <bucket>.

    The file's base name is reused as the S3 object key, so uploading
    the same file again overwrites the previous object.
    """
    client = boto3.client("s3")
    key = source.name
    client.upload_file(str(source), bucket, key)

    logger.info(f"Pushed {source} to {bucket} as {key}")
|
if __name__ == "__main__":
    # Guard against a missing argument: the original indexed sys.argv[1]
    # directly and died with a bare IndexError instead of a usage message.
    if len(sys.argv) < 2:
        logger.error("No configuration path given.")
        raise RuntimeError("Usage: python <path>/backup-podman-volumes.py <config-path>")

    config_path = pathlib.Path(sys.argv[1])

    if not config_path.exists():
        logger.error("Did not find configuration file at %s.", config_path)
        raise RuntimeError(f"Did not find configuration at {config_path}")

    # NOTE(review): parse_file is the pydantic-v1 API, deprecated in v2 —
    # confirm the pinned pydantic version before migrating to
    # Config.model_validate_json(config_path.read_text()).
    config = Config.parse_file(config_path)

    for volume in config.volumes:
        exported_fn = f"{volume}.tar"
        exported_path = pathlib.Path("/tmp", exported_fn)

        # Export to a scratch file, push it to S3, then keep a local copy
        # of the latest archive alongside the remote one.
        export_volume(volume, exported_path)
        local_backup_path = pathlib.Path(config.local_backup_path, exported_fn)
        push_to_s3(exported_path, config.bucket_name)
        shutil.copy(exported_path, local_backup_path)

        logger.info("Backed up %s to S3 and local.", volume)