feat(backend): sub fetch endpoints (#2)

* feat(backend): sub fetch endpoints

* ci: skip frontend/backend if no frontend change

* ci: backend formatting

* build: tooling and commands

* build: psycopg2

* build: dependency locking tooling

* build: pylint

* feat: migration handling

* feat: rudimentary upload flow

* feat: basic file create/read interactions

* docs: stubs

* build: migration command
Marc 2023-08-08 23:49:54 -04:00, committed by GitHub
parent 25ae69f6f4
commit bf7431f747
24 changed files with 1395 additions and 2 deletions


@@ -11,9 +11,91 @@ env:
  CI: 1
jobs:
  preflight:
    runs-on: ubuntu-latest
    name: Preflight checks
    outputs:
      be_changed: ${{ steps.be-changes.outputs.be_changed }}
      fe_changed: ${{ steps.fe-changes.outputs.fe_changed }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Change check (frontend)
        id: fe-changes
        run: |
          git diff --name-only origin/main origin/${GITHUB_HEAD_REF} -- ./frontend
          if [ -n "$(git diff --name-only origin/main origin/${GITHUB_HEAD_REF} -- ./frontend)" ]
          then
            echo "fe_changed=true" >> "$GITHUB_OUTPUT"
          fi
      - name: Change check (backend)
        id: be-changes
        run: |
          git diff --name-only origin/main origin/${GITHUB_HEAD_REF} -- ./backend
          if [ -n "$(git diff --name-only origin/main origin/${GITHUB_HEAD_REF} -- ./backend)" ]
          then
            echo "be_changed=true" >> "$GITHUB_OUTPUT"
          fi
      - name: Summary
        run: |
          echo ${{ steps.be-changes.outputs.be_changed }}
          echo ${{ steps.fe-changes.outputs.fe_changed }}
  be-setup:
    runs-on: ubuntu-latest
    name: Setup (backend)
    needs: preflight
    if: needs.preflight.outputs.be_changed == 'true'
    defaults:
      run:
        working-directory: backend
    steps:
      - uses: actions/checkout@v3
      - name: get-python-version
        # An `id` is required for the steps.get-python-version.outputs references below.
        id: get-python-version
        run: |
          echo "python_version=$(cat .python-version)" >> $GITHUB_OUTPUT
      - uses: actions/setup-python@v3
        with:
          python-version: ${{ steps.get-python-version.outputs.python_version }}
      - uses: actions/cache@v3
        id: cache-restore
        with:
          path: |
            .venv
          key: ${{ runner.os }}-${{ hashFiles('**/requirements*.txt') }}-${{ steps.get-python-version.outputs.python_version }}
      - name: Install dependencies
        if: steps.cache-restore.outputs.cache-hit != 'true'
        run: . script/bootstrap
  be-lint:
    runs-on: ubuntu-latest
    name: Lint (backend)
    needs: be-setup
    defaults:
      run:
        working-directory: backend
    steps:
      - uses: actions/checkout@v3
      - name: get-python-version
        id: get-python-version
        run: |
          echo "python_version=$(cat .python-version)" >> $GITHUB_OUTPUT
      - uses: actions/setup-python@v3
        with:
          python-version: ${{ steps.get-python-version.outputs.python_version }}
      - uses: actions/cache@v3
        id: cache-restore
        with:
          path: |
            .venv
          key: ${{ runner.os }}-${{ hashFiles('**/requirements*.txt') }}-${{ steps.get-python-version.outputs.python_version }}
      - name: Lint
        run: |
          . script/bootstrap
          black . --check
  fe-setup:
    runs-on: ubuntu-latest
    name: Setup (frontend)
    needs: preflight
    if: needs.preflight.outputs.fe_changed == 'true'
    defaults:
      run:
        working-directory: frontend
@@ -33,7 +115,7 @@ jobs:
        run: . script/bootstrap
  fe-lint:
    runs-on: ubuntu-latest
    name: Lint (frontend)
    defaults:
      run:
        working-directory: frontend
@@ -56,7 +138,7 @@ jobs:
          yarn lint
  fe-test:
    runs-on: ubuntu-latest
    name: Test (frontend)
    defaults:
      run:
        working-directory: frontend

.gitignore (5 changes)

@@ -7,6 +7,11 @@ yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
bin
.venv
**/*.pyc
*/.pnp.*
*/.yarn/*
!*/.yarn/patches

README.md

@@ -1,2 +1,7 @@
# rotini
An unnamed cloud storage app

## Development

Run `. script/bootstrap` to enable the `task` commands that make everything else simpler. Then, `task -l` will list the available utilities.
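
For instance, a first session from the repository root might look like this (commands are taken from the Taskfile added below; outputs are omitted):

```bash
# Install the Task runner into ./bin and put it on PATH for this shell.
. script/bootstrap

# List the available tasks with their descriptions.
task -l

# A couple of examples from the Taskfile: lint the backend, start the frontend.
task be.lint
task fe.start
```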

Taskfile.yml (new file, 52 lines)

@@ -0,0 +1,52 @@
version: '3'

tasks:
  be.bootstrap:
    internal: true
    cmds:
      - . script/bootstrap
    dir: backend
  be.lint:
    desc: "Lints /backend using black + pylint."
    deps: [be.bootstrap]
    cmds:
      - black . --check
      - pylint rotini
    dir: backend
  be.lintfix:
    desc: "Lints and fixes /backend using black + pylint."
    deps: [be.bootstrap]
    cmds:
      - black .
      - pylint rotini
    dir: backend
  be.start:
    desc: "Starts the backend application."
    deps: [be.bootstrap]
    cmds:
      - source ../.venv/bin/activate && source ../.env && python -m uvicorn main:app
    dir: backend/rotini
  be.migrate:
    desc: "Applies migrations. Usage: be.migrate -- <up|down>"
    deps: [be.bootstrap]
    cmds:
      - source ../../.venv/bin/activate && source ../../.env && python migrate.py {{.CLI_ARGS}}
    dir: backend/rotini/migrations
  be.lock-deps:
    desc: "Locks production and development dependencies."
    deps: [be.bootstrap]
    cmds:
      - source .venv/bin/activate && . script/requirements-lock
    dir: backend
  fe.bootstrap:
    internal: true
    cmds:
      - . script/bootstrap
    dir: frontend
  fe.start:
    desc: "Starts the frontend application."
    deps: [fe.bootstrap]
    cmds:
      - yarn start
    dir: frontend
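
Note how `be.migrate` forwards `{{.CLI_ARGS}}`: with Task, anything after `--` is passed through to the underlying command, so the migration task is driven like this:

```bash
# Apply all pending migrations.
task be.migrate -- up

# Revert applied migrations.
task be.migrate -- down
```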

backend/.pylintrc (new file, 635 lines)

@@ -0,0 +1,635 @@
[MAIN]
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
# in a server-like mode.
clear-cache-post-run=no
# Load and enable all available extensions. Use --list-extensions to see a list
# all available extensions.
#enable-all-extensions=
# In error mode, messages with a category besides ERROR or FATAL are
# suppressed, and no reports are done by default. Error mode is compatible with
# disabling specific errors.
#errors-only=
# Always return a 0 (non-error) status code, even if lint errors are found.
# This is primarily useful in continuous integration scripts.
#exit-zero=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
extension-pkg-whitelist=
# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
fail-on=
# Specify a score threshold under which the program will exit with error.
fail-under=10
# Interpret the stdin as a python script, whose filename needs to be passed as
# the module_or_package argument.
#from-stdin=
# Files or directories to be skipped. They should be base names, not paths.
ignore=CVS
# Add files or directories matching the regular expressions patterns to the
# ignore-list. The regex matches against paths and can be in Posix or Windows
# format. Because '\\' represents the directory delimiter on Windows systems,
# it can't be used as an escape character.
ignore-paths=^\/.venv
# Files or directories matching the regular expression patterns are skipped.
# The regex matches against base names, not paths. The default value ignores
# Emacs file locks
ignore-patterns=^\.#
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use, and will cap the count on Windows to
# avoid hangs.
jobs=1
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Minimum Python version to use for version dependent checks. Will default to
# the version used to run pylint.
py-version=3.10
# Discover python modules and packages in the file system subtree.
recursive=no
# Add paths to the list of the source roots. Supports globbing patterns. The
# source root is an absolute path or a path relative to the current working
# directory used to determine a package namespace for modules located under the
# source root.
source-roots=rotini
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# In verbose mode, extra non-checker-related info will be displayed.
#verbose=
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style. If left empty, argument names will be checked with the set
# naming style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style. If left empty, attribute names will be checked with the set naming
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
          bar,
          baz,
          toto,
          tutu,
          tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style. If left empty, class attribute names will be checked
# with the set naming style.
#class-attribute-rgx=
# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style. If left empty, class constant names will be checked with
# the set naming style.
#class-const-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style. If left empty, class names will be checked with the set naming style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style. If left empty, constant names will be checked with the set naming
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style. If left empty, function names will be checked with the set
# naming style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
           j,
           k,
           ex,
           Run,
           _
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style. If left empty, inline iteration names will be checked
# with the set naming style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style. If left empty, method names will be checked with the set naming style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style. If left empty, module names will be checked with the set naming style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Regular expression matching correct type alias names. If left empty, type
# alias names will be checked with the set naming style.
#typealias-rgx=
# Regular expression matching correct type variable names. If left empty, type
# variable names will be checked with the set naming style.
#typevar-rgx=
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style. If left empty, variable names will be checked with the set
# naming style.
#variable-rgx=
[CLASSES]
# Warn about protected attribute access inside special methods
check-protected-access-in-special-methods=no
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
                      __new__,
                      setUp,
                      asyncSetUp,
                      __post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[DESIGN]
# List of regular expressions of class ancestor names to ignore when counting
# public methods (see R0903)
exclude-too-few-public-methods=
# List of qualified class names to ignore when counting class parents (see
# R0901)
ignored-parents=
# Maximum number of arguments for function / method.
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[EXCEPTIONS]
# Exceptions that will emit a warning when caught.
overgeneral-exceptions=builtins.BaseException,builtins.Exception
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=1000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow explicit reexports by alias from a package __init__.
allow-reexport-from-package=no
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=
# Output a graph (.gv or any supported image format) of external dependencies
# to the given file (report RP0402 must not be disabled).
ext-import-graph=
# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be
# disabled).
import-graph=
# Output a graph (.gv or any supported image format) of internal dependencies
# to the given file (report RP0402 must not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
# UNDEFINED.
confidence=HIGH,
           CONTROL_FLOW,
           INFERENCE,
           INFERENCE_FAILURE,
           UNDEFINED
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=raw-checker-failed,
        bad-inline-option,
        locally-disabled,
        file-ignored,
        suppressed-message,
        useless-suppression,
        deprecated-pragma,
        use-symbolic-message-instead,
        invalid-name,
        missing-function-docstring,
        missing-module-docstring,
        too-many-locals
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[METHOD_ARGS]
# List of qualified names (i.e., library.method) which require a timeout
# parameter e.g. 'requests.api.get,requests.api.post'
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
      XXX,
      TODO
# Regular expression of note tags to take in consideration.
notes-rgx=
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
# 'convention', and 'info' which contain the number of messages in each
# category, as well as 'statement' which is the total number of statements
# analyzed. This score is used by the global evaluation report (RP0004).
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
#output-format=
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[SIMILARITIES]
# Comments are removed from the similarity computation
ignore-comments=yes
# Docstrings are removed from the similarity computation
ignore-docstrings=yes
# Imports are removed from the similarity computation
ignore-imports=yes
# Signatures are removed from the similarity computation
ignore-signatures=yes
# Minimum lines number of a similarity.
min-similarity-lines=4
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. No available dictionaries : You need to install
# both the python package and the system dependency for enchant to work..
spelling-dict=
# List of comma separated words that should be considered directives if they
# appear at the beginning of a comment and should not be checked.
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of symbolic message names to ignore for Mixin members.
ignored-checks-for-mixins=no-member,
                          not-async-context-manager,
                          not-context-manager,
                          attribute-defined-outside-init
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# Regex pattern to define which classes are considered mixins.
mixin-class-rgx=.*[Mm]ixin
# List of decorators that change the signature of a decorated function.
signature-mutators=
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of names allowed to shadow builtins
allowed-redefined-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
          _cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io

backend/.python-version (new file, 1 line)

@@ -0,0 +1 @@
3.10.11

backend/README.md (new file, 16 lines)

@@ -0,0 +1,16 @@
# Rotini backend

## Development

Before starting, make sure to run the [root bootstrap script](../README.md#Development) so the `task` commands are enabled.

Locally, a Postgres database that can be used for development can be started via `docker-compose up -d`.

An envfile should be present at `.env` and should define the variables below (a sample sketch follows the list):

- `DATABASE_USERNAME`, the username to initialize the DB user with;
- `DATABASE_PASSWORD`, the password to assign to that user;
- `DATABASE_HOST`, the host on which the database runs;
- `DATABASE_PORT`, the port on which the database runs;
- `DATABASE_NAME`, the name of the database within the Postgres instance.
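
For illustration, a minimal `backend/.env` compatible with the docker-compose service below might look like this; every value here is a placeholder, not a project default:

```bash
# Sample values for local development only.
# The stock postgres image creates the default "postgres" user/database
# unless POSTGRES_USER/POSTGRES_DB are also set in docker-compose.yml.
DATABASE_USERNAME=postgres
DATABASE_PASSWORD=change-me
DATABASE_HOST=localhost
DATABASE_PORT=5432
DATABASE_NAME=postgres
```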

docker-compose.yml

@@ -0,0 +1,11 @@
version: '3.7'

services:
  database:
    image: postgres:15.3
    env_file:
      - .env
    ports:
      - 5432:5432
    environment:
      POSTGRES_PASSWORD: $DATABASE_PASSWORD

backend/requirements.in (new file, 4 lines)

@@ -0,0 +1,4 @@
fastapi~=0.101
uvicorn[standard]
python-multipart
psycopg2

backend/requirements.txt (new file, 48 lines)

@@ -0,0 +1,48 @@
annotated-types==0.5.0
# via pydantic
anyio==3.7.1
# via
# starlette
# watchfiles
click==8.1.6
# via uvicorn
exceptiongroup==1.1.2
# via anyio
fastapi==0.101.0
# via -r requirements.in
h11==0.14.0
# via uvicorn
httptools==0.6.0
# via uvicorn
idna==3.4
# via anyio
psycopg2==2.9.7
# via -r requirements.in
pydantic==2.1.1
# via fastapi
pydantic-core==2.4.0
# via pydantic
python-dotenv==1.0.0
# via uvicorn
python-multipart==0.0.6
# via -r requirements.in
pyyaml==6.0.1
# via uvicorn
sniffio==1.3.0
# via anyio
starlette==0.27.0
# via fastapi
typing-extensions==4.7.1
# via
# fastapi
# pydantic
# pydantic-core
# uvicorn
uvicorn[standard]==0.23.2
# via -r requirements.in
uvloop==0.17.0
# via uvicorn
watchfiles==0.19.0
# via uvicorn
websockets==11.0.3
# via uvicorn

backend/requirements_dev.in

@@ -0,0 +1,4 @@
-c requirements.txt
black~=23.7.0
pylint~=2.17.0

backend/requirements_dev.txt

@@ -0,0 +1,40 @@
astroid==2.15.6
# via pylint
black==23.7.0
# via -r requirements_dev.in
click==8.1.6
# via
# -c requirements.txt
# black
dill==0.3.7
# via pylint
isort==5.12.0
# via pylint
lazy-object-proxy==1.9.0
# via astroid
mccabe==0.7.0
# via pylint
mypy-extensions==1.0.0
# via black
packaging==23.1
# via black
pathspec==0.11.2
# via black
platformdirs==3.10.0
# via
# black
# pylint
pylint==2.17.5
# via -r requirements_dev.in
tomli==2.0.1
# via
# black
# pylint
tomlkit==0.12.1
# via pylint
typing-extensions==4.7.1
# via
# -c requirements.txt
# astroid
wrapt==1.15.0
# via astroid


backend/rotini/api/files.py

@@ -0,0 +1,42 @@
"""
Files API.

This API allows users to create and query for existing data about
files that live in the system.
"""
from fastapi import APIRouter, HTTPException, UploadFile

import use_cases.files as files_use_cases

router = APIRouter(prefix="/files")


@router.get("/")
def list_files():
    return files_use_cases.get_all_file_records()


@router.post("/")
async def upload_file(file: UploadFile):
    # First pass: read the upload to measure its size, then rewind.
    content = await file.read()
    size = len(content)
    await file.seek(0)

    # Second pass: write the content to disk under the original filename.
    with open(file.filename, "wb") as f:
        content = await file.read()
        f.write(content)

    created_record = files_use_cases.create_file_record(file.filename, size)

    return created_record


@router.get("/{file_id}/")
def get_file_details(file_id: str):
    file = files_use_cases.get_file_record_by_id(file_id)

    if file is None:
        # `raise` (not `return`) so FastAPI turns this into a 404 response.
        raise HTTPException(status_code=404)

    return file
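
For a feel of the endpoints, a hypothetical session against a locally running instance (assuming uvicorn's default port, 8000) might look like this:

```bash
# Upload a file; the route saves it to disk and records its path and size.
curl -F "file=@notes.txt" http://localhost:8000/files/

# List every tracked file record.
curl http://localhost:8000/files/

# Fetch a single record; substitute an id returned by the calls above.
curl http://localhost:8000/files/<file_id>/
```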

backend/rotini/db.py (new file, 16 lines)

@@ -0,0 +1,16 @@
import os

import psycopg2


def get_connection():
    """
    Create a database connection.
    """
    return psycopg2.connect(
        user=os.environ["DATABASE_USERNAME"],
        password=os.environ["DATABASE_PASSWORD"],
        host=os.environ["DATABASE_HOST"],
        port=os.environ["DATABASE_PORT"],
        database=os.environ["DATABASE_NAME"],
    )

backend/rotini/main.py (new file, 26 lines)

@@ -0,0 +1,26 @@
"""
Rotini: a self-hosted cloud storage & productivity app.
"""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

import api.files

app = FastAPI()

origins = ["http://localhost:1234"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(api.files.router)


@app.get("/", status_code=204)
def healthcheck():
    pass
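
Once the app is running (e.g. via `task be.start`), the healthcheck can be probed directly; the assumption here is again uvicorn's default port of 8000:

```bash
curl -i http://localhost:8000/
# Expect an empty-bodied "HTTP/1.1 204 No Content" response.
```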

backend/rotini/migrations/migrate.py

@@ -0,0 +1,254 @@
"""
Migration handler.

This module handles database migrations.

Migrations are expected to be Python files of the format:

```
UID = <UUID, current migration>
PARENT = <UUID, migration that must be applied before present>
MESSAGE = <str, message>

UP_SQL = <SQL>
DOWN_SQL = <SQL>
```

where UP_SQL is the change the migration represents and DOWN_SQL its inverse.

Usage:

    python migrate.py <up|down|new> [<migration_name, if new>]

Not including a migration name executes everything from the last executed
migration.
"""
import collections
import datetime
import importlib
import os
import pathlib
import sys
import typing
import uuid

import psycopg2

VALID_COMMANDS = ["up", "down", "new"]

DIRECTION_UP = 1
DIRECTION_DOWN = -1

# UUID attached to a migration.
MigrationID = str

# Filename (without ext.) of a migration.
MigrationModuleName = str

MigrationItem = collections.namedtuple("MigrationItem", "id module")


def _get_connection():
    """
    Create a database connection.
    """
    return psycopg2.connect(
        user=os.environ["DATABASE_USERNAME"],
        password=os.environ["DATABASE_PASSWORD"],
        host=os.environ["DATABASE_HOST"],
        port=os.environ["DATABASE_PORT"],
        database=os.environ["DATABASE_NAME"],
    )


def _ensure_migration_table():
    """
    Ensure that the migration tracking table exists.
    """
    connection = _get_connection()

    maybe_create_sql = """
    CREATE TABLE IF NOT EXISTS migrations_lastapplied (
        migration_uid text NOT NULL
    );
    """

    with connection:
        with connection.cursor() as cursor:
            cursor.execute(maybe_create_sql)


def _get_migration_sequence() -> typing.List[MigrationItem]:
    """
    Collects migration files and builds a historical timeline.

    This will detect duplicates and breaks in the sequence
    and raise if the history is not linear and complete.
    """
    migrations_dir = pathlib.Path(".")

    migrations: typing.Dict[MigrationID, MigrationModuleName] = {}
    dependency_map: typing.Dict[MigrationID, MigrationID] = {}

    for file in migrations_dir.iterdir():
        if file.name.startswith("migration_") and file.suffix == ".py":
            migration = importlib.import_module(file.stem)

            migration_id = migration.UID
            migration_parent = migration.PARENT

            if migration_id in migrations:
                raise RuntimeError("Duplicate migrations.")

            if migration_parent in dependency_map:
                raise RuntimeError("History must be linear.")

            migrations[migration_id] = str(file.stem)
            dependency_map[migration_parent] = migration_id

    if not dependency_map:
        print("No migrations yet!")
        return []

    root_id = dependency_map["None"]

    history: typing.List[MigrationItem] = [MigrationItem(root_id, migrations[root_id])]

    while history:
        next_id = dependency_map.get(history[-1].id)

        if next_id is None:
            break

        history.append(MigrationItem(next_id, migrations[next_id]))

    return history


def migrate(direction: typing.Union[typing.Literal[1], typing.Literal[-1]]):
    """
    Runs a migration (expected to be in the current directory
    and labeled 'migration_<label>.py').
    """
    _ensure_migration_table()

    connection = _get_connection()

    full_history, applied_migrations = _get_migration_sequence(), []
    last_applied = None

    with connection, connection.cursor() as cursor:
        cursor.execute('SELECT migration_uid FROM "migrations_lastapplied"')
        last_applied_row = cursor.fetchone()
        last_applied = last_applied_row[0] if last_applied_row else None

    full_history_ids = [migration.id for migration in full_history]

    if last_applied is not None and last_applied not in full_history_ids:
        raise RuntimeError("Last applied migration is not in history.")

    for migration_item in full_history:
        if last_applied is None:
            break

        applied_migrations.append(migration_item)

        if last_applied is not None and migration_item.id == last_applied:
            break

    migrations_to_apply = (
        full_history[len(applied_migrations) :]
        if direction == DIRECTION_UP
        else list(reversed(applied_migrations))
    )

    collected_sql = []

    for migration_item in migrations_to_apply:
        migration = importlib.import_module(migration_item.module)

        migration_sql = (
            migration.UP_SQL if direction == DIRECTION_UP else migration.DOWN_SQL
        )

        collected_sql.append(migration_sql)

        print(f"Collected {migration_item.module}: {migration.MESSAGE}")

    with connection, connection.cursor() as cursor:
        for pos, sql in enumerate(collected_sql):
            print(f"Applying {migrations_to_apply[pos][1]}")
            cursor.execute(sql)

        next_last_applied = (
            None if direction == DIRECTION_DOWN else migrations_to_apply[-1].id
        )

        if next_last_applied is None:
            cursor.execute("DELETE FROM migrations_lastapplied;")
        elif last_applied is None:
            cursor.execute(
                "INSERT INTO migrations_lastapplied (migration_uid) VALUES (%s);",
                (next_last_applied,),
            )
        else:
            cursor.execute(
                "UPDATE migrations_lastapplied SET migration_uid = %s",
                (next_last_applied,),
            )


def create_migration_file(label: str, message: typing.Optional[str]):
    """
    Create a new migration file with a dependency on the last migration
    in history.
    """
    migration_seq = _get_migration_sequence()

    print("Found migrations:")
    for migration_id, migration_file in migration_seq:
        print(f"{migration_id}: {migration_file}")

    parent_uid = migration_seq[-1][0] if migration_seq else None
    migration_uid = str(uuid.uuid4())
    now = datetime.datetime.now().isoformat()

    content = f"""\"\"\"
Generated: {now}
Message: {message}
\"\"\"
UID = "{migration_uid}"
PARENT = "{parent_uid}"
MESSAGE = "{message}"

UP_SQL = \"\"\" \"\"\"

DOWN_SQL = \"\"\" \"\"\"
"""

    migration_filename = f"migration_{len(migration_seq)}_{label}.py"

    with open(migration_filename, "w", encoding="utf8") as migration_file:
        migration_file.write(content)

    print(f"Created {migration_filename}.")


if __name__ == "__main__":
    if len(sys.argv) < 2:
        raise RuntimeError("Supply up/down as a first argument.")

    if sys.argv[1] not in VALID_COMMANDS:
        raise RuntimeError("Invalid command.")

    arguments = sys.argv[1:]

    COMMAND = arguments[0]
    MIGRATION_NAME = arguments[1] if len(arguments) >= 2 else None
    MIGRATION_MESSAGE = arguments[2] if len(arguments) == 3 else None

    if COMMAND == "up":
        migrate(DIRECTION_UP)
    elif COMMAND == "down":
        migrate(DIRECTION_DOWN)
    elif COMMAND == "new":
        create_migration_file(MIGRATION_NAME, MIGRATION_MESSAGE)
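
Concretely, the `__main__` block above supports a workflow like the following (run from `backend/rotini/migrations`; the label and message are hypothetical):

```bash
# Scaffold migration_<n>_add_users.py with UID/PARENT prefilled.
python migrate.py new add_users "Add users table"

# Apply everything after the last applied migration.
python migrate.py up

# Revert all applied migrations.
python migrate.py down
```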


@@ -0,0 +1,20 @@
"""
Generated: 2023-08-07T16:14:11.314059
Message: Files table initial migration
"""
UID = "06f02980-864d-4832-a894-2e9d2543a79a"
PARENT = "None"
MESSAGE = "Files table initial migration"

UP_SQL = """CREATE TABLE
files
(
    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    path text NOT NULL,
    size bigint NOT NULL
);
"""

DOWN_SQL = """DROP TABLE files;"""


@@ -0,0 +1,16 @@
"""
Creates the initial files table.
"""
UP_SQL = """CREATE TABLE
files
(
    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    path text NOT NULL,
    size bigint NOT NULL
);
"""

DOWN_SQL = """
DROP TABLE files
"""

backend/rotini/use_cases/files.py

@@ -0,0 +1,86 @@
"""
File-related use cases.

Use cases and data structures defined in this file
manipulate file records in the database or represent them
after they have been read.
"""
import pathlib
import typing

from db import get_connection


class FileRecord(typing.TypedDict):
    """
    Database record associated with a file tracked
    by the system.
    """

    id: str
    size: int
    path: str
    filename: str


def create_file_record(path: str, size: int) -> FileRecord:
    """
    Creates a record representing an uploaded file in the database.

    The record itself does not ensure that the file exists on disk, but just
    that it's tracked by the system.
    """
    inserted_id = None

    with get_connection() as connection, connection.cursor() as cursor:
        cursor.execute(
            "INSERT INTO files (path, size) VALUES (%s, %s) RETURNING id", (path, size)
        )
        # fetchone() returns a row tuple; unpack the returned ID.
        inserted_id = cursor.fetchone()[0]

    filename = pathlib.Path(path).name

    return FileRecord(id=inserted_id, size=size, path=path, filename=filename)


def get_all_file_records() -> typing.Tuple[FileRecord, ...]:
    """
    Fetches all available files from the database.
    """
    rows = None

    with get_connection() as connection, connection.cursor() as cursor:
        cursor.execute("SELECT * FROM files;")
        rows = cursor.fetchall()

    if rows is None:
        raise RuntimeError("Failed to get files.")

    return tuple(
        FileRecord(
            id=row[0], path=row[1], size=row[2], filename=pathlib.Path(row[1]).name
        )
        for row in rows
    )


def get_file_record_by_id(file_id: str) -> typing.Optional[FileRecord]:
    """
    Fetches a single file by ID.

    If the ID doesn't correspond to a record, None is returned.
    """
    row = None

    with get_connection() as connection, connection.cursor() as cursor:
        cursor.execute("SELECT * FROM files WHERE id=%s;", (file_id,))
        row = cursor.fetchone()

    if row is None:
        return None

    return FileRecord(
        id=row[0], path=row[1], size=row[2], filename=pathlib.Path(row[1]).name
    )

backend/script/bootstrap (new file, 9 lines)

@@ -0,0 +1,9 @@
#!/bin/bash
python -m venv .venv
. .venv/bin/activate
pip install -U pip==23.0.0 pip-tools==7.1.0
pip-sync requirements.txt requirements_dev.txt

backend/script/requirements-lock

@@ -0,0 +1,3 @@
#!/bin/bash
pip-compile requirements.in --no-header && pip-compile requirements_dev.in --no-header

script/bootstrap (new file, 18 lines)

@@ -0,0 +1,18 @@
#!/bin/bash
TASK_VERSION="v3.28.0"
BIN_PATH=$PWD/bin
if [[ ! -f $BIN_PATH/task || -z "$($BIN_PATH/task --version | grep $TASK_VERSION)" ]]; then
    sh -c "$(curl --location https://taskfile.dev/install.sh)" -- $TASK_VERSION -d
fi
BIN_MATCH=$(echo $PATH | grep "\:$BIN_PATH")
if [[ -z $BIN_MATCH ]]; then
    echo "Adding Task to \$PATH."
    export PATH=$PATH:$BIN_PATH
fi
echo "All done!"