feat(backend): file deletion flow (#16)

* feat(backend): file deletion flow

* ci: set change-check flags when no changes detected

* feat(backend): refactor settings to be a singleton object

* refactor(backend): cleaning delete flow cruft

* test(backend): add coverage for deletion + adapt coverage to only rely on endpoints

* docs: type responses for deletions
Author: Marc, 2023-08-14 08:26:05 -04:00 (committed by GitHub)
Parent: 7fa2aff9f9
Commit: fdc402f76a
12 changed files with 216 additions and 19 deletions

@@ -41,10 +41,14 @@ jobs:
           if [ -n "$(git diff --name-only origin/main origin/${GITHUB_HEAD_REF} -- ./frontend)" ]
           then
             echo "fe_changed=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "fe_changed=false" >> "$GITHUB_OUTPUT"
           fi
           if [ -n "$(git diff --name-only origin/main origin/${GITHUB_HEAD_REF} -- ./backend)" ]
           then
             echo "be_changed=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "be_changed=false" >> "$GITHUB_OUTPUT"
           fi
   backend:
     uses: ./.github/workflows/backend-pipeline.yml

@@ -47,6 +47,8 @@ tasks:
   migrate:
     desc: "Applies migrations. Usage: be:migrate -- <up|down>"
     deps: [bootstrap]
+    env:
+      PYTHONPATH: '..'
     dotenv:
       - "{{ .DOTENV }}"
     cmd: "{{ .VENV_BIN }}/python migrate.py {{ .CLI_ARGS }}"

@@ -2,3 +2,4 @@ fastapi~=0.101
 uvicorn[standard]
 python-multipart
 psycopg2
+typing_extensions

@@ -34,6 +34,7 @@ starlette==0.27.0
     # via fastapi
 typing-extensions==4.7.1
     # via
+    #   -r requirements.in
     #   fastapi
     #   pydantic
     #   pydantic-core

@@ -5,9 +5,12 @@ This API allows users to create and query for existing data about
 files that live in the system.
 """
+import pathlib
+
 from fastapi import APIRouter, HTTPException, UploadFile

 import use_cases.files as files_use_cases
+from settings import settings

 router = APIRouter(prefix="/files")
@@ -23,11 +26,12 @@ async def upload_file(file: UploadFile):
     size = len(content)
     await file.seek(0)

-    with open(file.filename, "wb") as f:
+    dest_path = pathlib.Path(settings.STORAGE_ROOT, file.filename)
+
+    with open(dest_path, "wb") as f:
         content = await file.read()
         f.write(content)

-    created_record = files_use_cases.create_file_record(file.filename, size)
+    created_record = files_use_cases.create_file_record(str(dest_path), size)

     return created_record
@@ -40,3 +44,31 @@ def get_file_details(file_id: str):
         raise HTTPException(status_code=404)

     return file
+
+
+@router.delete("/{file_id}/")
+def delete_file(file_id: str) -> files_use_cases.FileRecord:
+    """
+    Deletes a file given its ID.
+
+    This will delete the file in the database records as well
+    as on disk. The operation is not reversible.
+
+    DELETE /files/{file_id}/
+
+    200 { <FileRecord> }
+        The file exists and has been deleted from storage and
+        from the database.
+
+    404 {}
+        The file ID did not map to anything.
+    """
+    try:
+        file = files_use_cases.delete_file_record_by_id(file_id)
+    except files_use_cases.DoesNotExist as exc:
+        raise HTTPException(status_code=404) from exc
+
+    return file
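For context, a minimal sketch of how a client might consume the new endpoint; the base URL and file ID below are illustrative and not part of this change:

    # Sketch: exercising DELETE /files/{file_id}/ with httpx (illustrative values).
    import httpx

    resp = httpx.delete("http://localhost:8000/files/some-file-id/")

    if resp.status_code == 200:
        print(resp.json())          # the deleted FileRecord is echoed back
    elif resp.status_code == 404:
        print("no file with that ID")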

@@ -1,6 +1,6 @@
 import psycopg2

-import settings
+from settings import settings


 def get_connection():

@@ -1,5 +1,9 @@
+import os
+
 DATABASE_USERNAME = "postgres"
 DATABASE_PASSWORD = "test"
 DATABASE_HOST = "localhost"
 DATABASE_PORT = 5431
 DATABASE_NAME = "postgres"
+
+STORAGE_ROOT = os.getenv("ROTINI_STORAGE_ROOT")

@@ -27,7 +27,6 @@ Not including a migration name executes everything from the last executed
 migration.
 """
-import os
 import collections
 import pathlib
 import datetime
@@ -38,7 +37,7 @@ import sys
 import psycopg2

-import settings
+from settings import settings

 VALID_COMMANDS = ["up", "down", "new"]

@@ -1,12 +1,50 @@
-# pylint: disable=unused-import, wildcard-import, unused-wildcard-import
+# pylint: disable=unused-import, wildcard-import, unused-wildcard-import, too-few-public-methods
 import os
+import typing

 IS_CI = os.getenv("ROTINI_CI")
 IS_TEST = os.getenv("ROTINI_TEST")

+
+class Settings:
+    """
+    Representation of the configuration settings available to the
+    application.
+    """
+
+    ENV: str
+
+    DATABASE_USERNAME: str
+    DATABASE_PASSWORD: str
+    DATABASE_HOST: str
+    DATABASE_PORT: int
+    DATABASE_NAME: str
+
+    STORAGE_ROOT: typing.Optional[str] = "."
+
+    def __init__(self, *_, **kwargs):
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+
+
+def extract_settings(env: str, imported_module) -> Settings:
+    """
+    Extracts all the exposed values from the given module and
+    creates a corresponding Settings object.
+    """
+    imported_values = {
+        k: v for k, v in imported_module.__dict__.items() if not k.startswith("__")
+    }
+
+    return Settings(ENV=env, **imported_values)
+
+
 if IS_CI is not None:
-    from envs.ci import *
+    import envs.ci as ci_config
+
+    settings = extract_settings("ci", ci_config)
 elif IS_TEST is not None:
-    from envs.test import *
+    import envs.test as test_config
+
+    settings = extract_settings("ci", test_config)
 else:
-    from envs.local import *
+    import envs.local as local_config
+
+    settings = extract_settings("ci", local_config)
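Because `settings` is now a single shared object rather than a module of constants, call sites read attributes off it and tests can override individual values at runtime; a minimal sketch of both (the override value below is illustrative):

    # Sketch: consuming and overriding the settings singleton.
    from settings import settings

    print(settings.DATABASE_HOST)   # populated by extract_settings() from the active envs.* module

    # Overriding one attribute is visible to every importer of the singleton,
    # which is what monkeypatch.setattr(settings, "STORAGE_ROOT", ...) relies on below.
    settings.STORAGE_ROOT = "/tmp/rotini-files"   # illustrative path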

@@ -5,10 +5,18 @@ Use cases and data structures defined in this file
 manipulate file records in the database or represent them
 after they have been read.
 """
-import typing
 import pathlib

+import typing_extensions as typing
+
 from db import get_connection
+from settings import settings
+
+
+class DoesNotExist(Exception):
+    """
+    General purpose exception signalling a failure to find a database record.
+    """


 class FileRecord(typing.TypedDict):
@@ -84,3 +92,25 @@ def get_file_record_by_id(file_id: str) -> typing.Optional[FileRecord]:
     return FileRecord(
         id=row[0], path=row[1], size=row[2], filename=pathlib.Path(row[1]).name
     )
+
+
+def delete_file_record_by_id(file_id: str) -> typing.Union[typing.NoReturn, FileRecord]:
+    """
+    Deletes a single file by ID, including its presence in storage.
+
+    If the ID doesn't correspond to a record, DoesNotExist is raised.
+    """
+    row = None
+
+    with get_connection() as connection, connection.cursor() as cursor:
+        cursor.execute("DELETE FROM files WHERE id=%s RETURNING *;", (file_id,))
+        row = cursor.fetchone()
+
+    if row is None:
+        raise DoesNotExist()
+
+    pathlib.Path(pathlib.Path(settings.STORAGE_ROOT, row[1])).unlink()
+
+    return FileRecord(
+        id=row[0], path=row[1], size=row[2], filename=pathlib.Path(row[1]).name
+    )
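Callers are expected to treat DoesNotExist as the not-found signal, as the route layer above does; a minimal sketch of using the use case directly (the ID is illustrative):

    # Sketch: calling the deletion use case and mapping its failure mode.
    import use_cases.files as files_use_cases

    try:
        deleted = files_use_cases.delete_file_record_by_id("some-id")   # illustrative ID
    except files_use_cases.DoesNotExist:
        print("nothing deleted: no matching row, so no file was touched")
    else:
        print(deleted["path"], deleted["size"])   # the deleted row, returned as a FileRecord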

@@ -1,9 +1,12 @@
 from fastapi.testclient import TestClient
 import pytest
+import unittest.mock

 from rotini.main import app
 from rotini.db import get_connection
+from settings import settings


 @pytest.fixture(name="client")
 def fixture_client():
@@ -12,5 +15,21 @@ def fixture_client():
 @pytest.fixture(autouse=True)
 def reset_database():
+    """
+    Empties all user tables between tests.
+    """
     with get_connection() as conn, conn.cursor() as cursor:
         cursor.execute("DELETE FROM files;")
+
+
+@pytest.fixture(autouse=True)
+def set_storage_path(tmp_path, monkeypatch):
+    """
+    Ensures that files stored by tests are stored
+    in temporary directories.
+    """
+    files_dir = tmp_path / "files"
+    files_dir.mkdir()
+
+    monkeypatch.setattr(settings, "STORAGE_ROOT", str(files_dir))
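Since the route resolves settings.STORAGE_ROOT at request time, patching the singleton before each test is enough to redirect every upload; a hypothetical test illustrating the effect (not part of this diff):

    # Sketch: uploads made through the client land under tmp_path / "files"
    # because the autouse fixture above rewrote settings.STORAGE_ROOT first.
    def test_uploads_are_stored_in_tmp_dir(client, tmp_path):    # hypothetical test
        sample = tmp_path / "sample.txt"
        sample.write_text("hello")

        with open(sample, "rb") as stream:
            created = client.post("/files/", files={"file": stream}).json()

        assert created["path"].startswith(str(tmp_path / "files"))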

@@ -1,23 +1,43 @@
-from rotini.use_cases import files as files_use_cases
+import pathlib


-def test_list_files_returns_registered_files_and_200(client):
-    file_1 = files_use_cases.create_file_record("/test1.txt", 123)
-    file_2 = files_use_cases.create_file_record("/test2.txt", 456)
+def test_list_files_returns_registered_files_and_200(client, tmp_path):
+    mock_file_1 = tmp_path / "test1.txt"
+    mock_file_1.write_text("testtest")
+
+    with open(str(mock_file_1), "rb") as mock_file_stream:
+        response = client.post("/files/", files={"file": mock_file_stream})
+        mock_file_1_data = response.json()
+
+    mock_file_2 = tmp_path / "test2.txt"
+    mock_file_2.write_text("testtest")
+
+    with open(str(mock_file_2), "rb") as mock_file_stream:
+        response = client.post("/files/", files={"file": mock_file_stream})
+        mock_file_2_data = response.json()

     response = client.get("/files/")

     assert response.status_code == 200
-    assert response.json() == [file_1, file_2]
+    assert response.json() == [mock_file_1_data, mock_file_2_data]


-def test_file_details_returns_specified_file_and_200(client):
-    file = files_use_cases.create_file_record("/test1.txt", 123)
+def test_file_details_returns_specified_file_and_200(client, tmp_path):
+    mock_file = tmp_path / "test.txt"
+    mock_file.write_text("testtest")

-    response = client.get(f"/files/{file['id']}/")
+    with open(str(mock_file), "rb") as mock_file_stream:
+        response = client.post("/files/", files={"file": mock_file_stream})
+        response_data = response.json()
+
+    created_file_id = response_data["id"]
+
+    response = client.get(f"/files/{created_file_id}/")

     assert response.status_code == 200
-    assert response.json() == file
+    assert response.json() == response_data


 def test_file_details_returns_404_if_does_not_exist(client):
@@ -25,3 +45,50 @@ def test_file_details_returns_404_if_does_not_exist(client):
     response = client.get(f"/files/{non_existent_id}/")

     assert response.status_code == 404
+
+
+def test_file_deletion_returns_404_if_does_not_exist(client):
+    non_existent_id = "06f02980-864d-4832-a894-2e9d2543a79a"
+
+    response = client.delete(f"/files/{non_existent_id}/")
+
+    assert response.status_code == 404
+
+
+def test_file_deletion_deletes_record_and_file(client, tmp_path):
+    mock_file = tmp_path / "test.txt"
+    mock_file.write_text("testtest")
+
+    with open(str(mock_file), "rb") as mock_file_stream:
+        response = client.post("/files/", files={"file": mock_file_stream})
+        response_data = response.json()
+
+    file_id = response_data["id"]
+    file_path = response_data["path"]
+
+    assert pathlib.Path(file_path).exists()
+
+    response = client.get(f"/files/{file_id}/")
+    assert response.status_code == 200
+
+    client.delete(f"/files/{file_id}/")
+
+    assert not pathlib.Path(file_path).exists()
+
+    response = client.get(f"/files/{file_id}/")
+    assert response.status_code == 404
+
+
+def test_file_deletion_200_and_return_deleted_resource(client, tmp_path):
+    mock_file = tmp_path / "test.txt"
+    mock_file.write_text("testtest")
+
+    with open(str(mock_file), "rb") as mock_file_stream:
+        response = client.post("/files/", files={"file": mock_file_stream})
+        response_data = response.json()
+
+    file_id = response_data["id"]
+
+    response = client.delete(f"/files/{file_id}/")
+
+    assert response.status_code == 200
+    assert response.json() == response_data