feat(backend): file deletion flow #16
12 changed files with 216 additions and 19 deletions
.github/workflows/ci.yml (vendored)

@@ -41,10 +41,14 @@ jobs:
          if [ -n "$(git diff --name-only origin/main origin/${GITHUB_HEAD_REF} -- ./frontend)" ]
          then
            echo "fe_changed=true" >> "$GITHUB_OUTPUT"
          else
            echo "fe_changed=false" >> "$GITHUB_OUTPUT"
          fi
          if [ -n "$(git diff --name-only origin/main origin/${GITHUB_HEAD_REF} -- ./backend)" ]
          then
            echo "be_changed=true" >> "$GITHUB_OUTPUT"
          else
            echo "be_changed=false" >> "$GITHUB_OUTPUT"
          fi
  backend:
    uses: ./.github/workflows/backend-pipeline.yml

@@ -47,6 +47,8 @@ tasks:
  migrate:
    desc: "Applies migrations. Usage: be:migrate -- <up|down>"
    deps: [bootstrap]
    env:
      PYTHONPATH: '..'
    dotenv:
      - "{{ .DOTENV }}"
    cmd: "{{ .VENV_BIN }}/python migrate.py {{ .CLI_ARGS }}"

@@ -2,3 +2,4 @@ fastapi~=0.101
uvicorn[standard]
python-multipart
psycopg2
typing_extensions

@@ -34,6 +34,7 @@ starlette==0.27.0
    # via fastapi
typing-extensions==4.7.1
    # via
    #   -r requirements.in
    #   fastapi
    #   pydantic
    #   pydantic-core

@@ -5,9 +5,12 @@ This API allows users to create and query for existing data about
files that live in the system.
"""

import pathlib

from fastapi import APIRouter, HTTPException, UploadFile

import use_cases.files as files_use_cases
from settings import settings

router = APIRouter(prefix="/files")


@@ -23,11 +26,12 @@ async def upload_file(file: UploadFile):
    size = len(content)
    await file.seek(0)

    with open(file.filename, "wb") as f:
    dest_path = pathlib.Path(settings.STORAGE_ROOT, file.filename)
    with open(dest_path, "wb") as f:
        content = await file.read()
        f.write(content)

    created_record = files_use_cases.create_file_record(file.filename, size)
    created_record = files_use_cases.create_file_record(str(dest_path), size)

    return created_record
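
For context on the change above: pathlib.Path(settings.STORAGE_ROOT, file.filename) joins the configured storage root with the upload's filename, and str(dest_path) is what gets persisted in the file record instead of the bare filename. A minimal illustration (the storage root shown here is a hypothetical value, not one taken from this repository):

    import pathlib

    storage_root = "/srv/rotini/files"  # hypothetical STORAGE_ROOT value
    dest_path = pathlib.Path(storage_root, "report.pdf")

    print(dest_path)       # /srv/rotini/files/report.pdf
    print(str(dest_path))  # the string form that would be stored alongside the size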

@@ -40,3 +44,31 @@ def get_file_details(file_id: str):
        raise HTTPException(status_code=404)

    return file


@router.delete("/{file_id}/")
def delete_file(file_id: str) -> files_use_cases.FileRecord:
    """
    Deletes a file given its ID.

    This will delete the file's database record as well
    as the file on disk. The operation is not reversible.

    DELETE /files/{file_id}/

    200 { <FileRecord> }

        The file exists and has been deleted from storage and
        from the database.

    404 {}

        The file ID did not map to anything.

    """
    try:
        file = files_use_cases.delete_file_record_by_id(file_id)
    except files_use_cases.DoesNotExist as exc:
        raise HTTPException(status_code=404) from exc

    return file
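
The new endpoint can be exercised the same way the tests below do, via the FastAPI TestClient. This is a hedged client-side sketch that reuses the rotini.main import path appearing in the test suite, with a made-up file ID:

    from fastapi.testclient import TestClient

    from rotini.main import app

    client = TestClient(app)

    file_id = "06f02980-864d-4832-a894-2e9d2543a79a"  # hypothetical ID
    response = client.delete(f"/files/{file_id}/")

    if response.status_code == 200:
        deleted_record = response.json()  # the FileRecord that was just removed
    elif response.status_code == 404:
        pass  # no record matched the ID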

@@ -1,6 +1,6 @@
import psycopg2

import settings
from settings import settings


def get_connection():
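
The body of get_connection sits outside this hunk; purely as a hypothetical sketch (not this project's actual implementation), a psycopg2 helper consuming the settings object typically looks like the following:

    import psycopg2

    from settings import settings


    def get_connection():
        # Hypothetical wiring: forward the configured values to psycopg2.
        return psycopg2.connect(
            user=settings.DATABASE_USERNAME,
            password=settings.DATABASE_PASSWORD,
            host=settings.DATABASE_HOST,
            port=settings.DATABASE_PORT,
            dbname=settings.DATABASE_NAME,
        )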

@@ -1,5 +1,9 @@
import os

DATABASE_USERNAME = "postgres"
DATABASE_PASSWORD = "test"
DATABASE_HOST = "localhost"
DATABASE_PORT = 5431
DATABASE_NAME = "postgres"

STORAGE_ROOT = os.getenv("ROTINI_STORAGE_ROOT")
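
STORAGE_ROOT is read from the ROTINI_STORAGE_ROOT environment variable at import time, so the variable has to be set before the environment module is first imported. A small, hypothetical example (the directory path is illustrative only):

    import os

    # Must happen before `settings` (and anything importing it) is loaded.
    os.environ["ROTINI_STORAGE_ROOT"] = "/srv/rotini/files"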

@@ -27,7 +27,6 @@ Not including a migration name executes everything from the last executed
migration.
"""

import os
import collections
import pathlib
import datetime

@@ -38,7 +37,7 @@ import sys

import psycopg2

import settings
from settings import settings

VALID_COMMANDS = ["up", "down", "new"]

@@ -1,12 +1,50 @@
# pylint: disable=unused-import, wildcard-import, unused-wildcard-import
# pylint: disable=unused-import, wildcard-import, unused-wildcard-import, too-few-public-methods
import os
import typing

IS_CI = os.getenv("ROTINI_CI")
IS_TEST = os.getenv("ROTINI_TEST")


class Settings:
    """
    Representation of the configuration settings available to the
    application.
    """

    ENV: str
    DATABASE_USERNAME: str
    DATABASE_PASSWORD: str
    DATABASE_HOST: str
    DATABASE_PORT: int
    DATABASE_NAME: str
    STORAGE_ROOT: typing.Optional[str] = "."

    def __init__(self, *_, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)


def extract_settings(env: str, imported_module) -> Settings:
    """
    Extracts all the exposed values from the given module and
    creates a corresponding Settings object.
    """
    imported_values = {
        k: v for k, v in imported_module.__dict__.items() if not k.startswith("__")
    }
    return Settings(ENV=env, **imported_values)


if IS_CI is not None:
    from envs.ci import *
    import envs.ci as ci_config

    settings = extract_settings("ci", ci_config)
elif IS_TEST is not None:
    from envs.test import *
    import envs.test as test_config

    settings = extract_settings("test", test_config)
else:
    from envs.local import *
    import envs.local as local_config

    settings = extract_settings("local", local_config)
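
To make the extraction step concrete, here is a hedged sketch of what extract_settings does with an environment module, using a throwaway module object rather than one of the real envs.* modules:

    import types

    from settings import Settings, extract_settings

    # Hypothetical stand-in for an envs.* module.
    fake_env = types.ModuleType("envs.fake")
    fake_env.DATABASE_HOST = "localhost"
    fake_env.DATABASE_PORT = 5432
    fake_env.STORAGE_ROOT = "/tmp/rotini-files"

    fake_settings = extract_settings("fake", fake_env)

    assert fake_settings.ENV == "fake"
    assert fake_settings.DATABASE_PORT == 5432
    assert fake_settings.STORAGE_ROOT == "/tmp/rotini-files"

Dunder attributes such as __name__ are filtered out by the startswith("__") check, so only the module's exposed settings end up on the resulting Settings instance.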

@@ -5,10 +5,18 @@ Use cases and data structures defined in this file
manipulate file records in the database or represent them
after they have been read.
"""
import typing
import pathlib

import typing_extensions as typing

from db import get_connection
from settings import settings


class DoesNotExist(Exception):
    """
    General purpose exception signalling a failure to find a database record.
    """


class FileRecord(typing.TypedDict):

@@ -84,3 +92,25 @@ def get_file_record_by_id(file_id: str) -> typing.Optional[FileRecord]:
    return FileRecord(
        id=row[0], path=row[1], size=row[2], filename=pathlib.Path(row[1]).name
    )


def delete_file_record_by_id(file_id: str) -> typing.Union[typing.NoReturn, FileRecord]:
    """
    Deletes a single file by ID, including its presence in storage.

    If the ID doesn't correspond to a record, DoesNotExist is raised.
    """

    row = None
    with get_connection() as connection, connection.cursor() as cursor:
        cursor.execute("DELETE FROM files WHERE id=%s RETURNING *;", (file_id,))
        row = cursor.fetchone()

    if row is None:
        raise DoesNotExist()

    pathlib.Path(pathlib.Path(settings.STORAGE_ROOT, row[1])).unlink()

    return FileRecord(
        id=row[0], path=row[1], size=row[2], filename=pathlib.Path(row[1]).name
    )
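
Callers are expected to treat DoesNotExist as the "nothing was deleted" signal, as the route above does. A minimal usage sketch (the ID is hypothetical):

    import use_cases.files as files_use_cases

    try:
        record = files_use_cases.delete_file_record_by_id("06f02980-864d-4832-a894-2e9d2543a79a")
    except files_use_cases.DoesNotExist:
        print("no such file record")
    else:
        print("deleted", record["path"], record["size"])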

@@ -1,9 +1,12 @@
from fastapi.testclient import TestClient
import pytest
import unittest.mock

from rotini.main import app
from rotini.db import get_connection

from settings import settings


@pytest.fixture(name="client")
def fixture_client():

@@ -12,5 +15,21 @@ def fixture_client():

@pytest.fixture(autouse=True)
def reset_database():
    """
    Empties all user tables between tests.
    """
    with get_connection() as conn, conn.cursor() as cursor:
        cursor.execute("DELETE FROM files;")


@pytest.fixture(autouse=True)
def set_storage_path(tmp_path, monkeypatch):
    """
    Ensures that files written during tests are stored
    in temporary directories.
    """

    files_dir = tmp_path / "files"
    files_dir.mkdir()

    monkeypatch.setattr(settings, "STORAGE_ROOT", str(files_dir))
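
Because both fixtures are autouse, every test gets a clean files table and a per-test storage directory. A hedged sketch of a test (not part of this changeset) that relies on the patched storage root:

    import pathlib

    from settings import settings


    def test_upload_lands_under_storage_root(client, tmp_path):
        mock_file = tmp_path / "example.txt"
        mock_file.write_text("hello")

        with open(mock_file, "rb") as stream:
            client.post("/files/", files={"file": stream})

        # set_storage_path pointed STORAGE_ROOT at tmp_path / "files" for this test.
        assert (pathlib.Path(settings.STORAGE_ROOT) / "example.txt").exists()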

@@ -1,23 +1,43 @@
from rotini.use_cases import files as files_use_cases
import pathlib


def test_list_files_returns_registered_files_and_200(client):
    file_1 = files_use_cases.create_file_record("/test1.txt", 123)
    file_2 = files_use_cases.create_file_record("/test2.txt", 456)
def test_list_files_returns_registered_files_and_200(client, tmp_path):
    mock_file_1 = tmp_path / "test1.txt"
    mock_file_1.write_text("testtest")

    with open(str(mock_file_1), "rb") as mock_file_stream:
        response = client.post("/files/", files={"file": mock_file_stream})

    mock_file_1_data = response.json()

    mock_file_2 = tmp_path / "test2.txt"
    mock_file_2.write_text("testtest")

    with open(str(mock_file_2), "rb") as mock_file_stream:
        response = client.post("/files/", files={"file": mock_file_stream})

    mock_file_2_data = response.json()

    response = client.get("/files/")

    assert response.status_code == 200
    assert response.json() == [file_1, file_2]
    assert response.json() == [mock_file_1_data, mock_file_2_data]


def test_file_details_returns_specified_file_and_200(client):
    file = files_use_cases.create_file_record("/test1.txt", 123)
def test_file_details_returns_specified_file_and_200(client, tmp_path):
    mock_file = tmp_path / "test.txt"
    mock_file.write_text("testtest")

    response = client.get(f"/files/{file['id']}/")
    with open(str(mock_file), "rb") as mock_file_stream:
        response = client.post("/files/", files={"file": mock_file_stream})

    response_data = response.json()
    created_file_id = response_data["id"]

    response = client.get(f"/files/{created_file_id}/")

    assert response.status_code == 200
    assert response.json() == file
    assert response.json() == response_data


def test_file_details_returns_404_if_does_not_exist(client):

@@ -25,3 +45,50 @@
    response = client.get(f"/files/{non_existent_id}/")

    assert response.status_code == 404


def test_file_deletion_returns_404_if_does_not_exist(client):
    non_existent_id = "06f02980-864d-4832-a894-2e9d2543a79a"
    response = client.delete(f"/files/{non_existent_id}/")

    assert response.status_code == 404


def test_file_deletion_deletes_record_and_file(client, tmp_path):
    mock_file = tmp_path / "test.txt"
    mock_file.write_text("testtest")

    with open(str(mock_file), "rb") as mock_file_stream:
        response = client.post("/files/", files={"file": mock_file_stream})

    response_data = response.json()
    file_id = response_data["id"]
    file_path = response_data["path"]

    assert pathlib.Path(file_path).exists()
    response = client.get(f"/files/{file_id}/")

    assert response.status_code == 200

    client.delete(f"/files/{file_id}/")
    assert not pathlib.Path(file_path).exists()

    response = client.get(f"/files/{file_id}/")

    assert response.status_code == 404


def test_file_deletion_200_and_return_deleted_resource(client, tmp_path):
    mock_file = tmp_path / "test.txt"
    mock_file.write_text("testtest")

    with open(str(mock_file), "rb") as mock_file_stream:
        response = client.post("/files/", files={"file": mock_file_stream})

    response_data = response.json()
    file_id = response_data["id"]

    response = client.delete(f"/files/{file_id}/")

    assert response.status_code == 200
    assert response.json() == response_data