refactor: FastAPI -> Django migration (#41)
* build(backend): set up Django 4.2 scaffolding
* feat: porting files API
* feat: file ownership basics
* feat: gaps in test compatibility
* test: port files API tests
* fix(tests): test database port 5432 -> 5431 to avoid conflict with application database
* feat(auth): LoginView, middleware to handle JWT bearer tokens
* refactor: clean up old FastAPI logic, temporary utils
* refactor: resolve LoginView linting
* feat: user creation + test coverage
* test: session creation coverage
* refactor: hoist secrets, replace placeholders
* chore: clear linting errors+warns
Parent: a737e954aa
Commit: 0242b2d5ff
59 changed files with 1060 additions and 2140 deletions
.gitignore (vendored) | 3

@@ -7,6 +7,9 @@ yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Env files
backend.env

.task
bin
@@ -25,6 +25,8 @@ tasks:
      - "{{ .VENV_BIN }}/black . --check"
      - "{{ .VENV_BIN }}/pylint ./rotini"
    dir: backend
    dotenv:
      - ../backend-test.env
  lintfix:
    desc: "Lints and fixes /backend using black + pylint."
    deps: [bootstrap]

@@ -32,15 +34,19 @@ tasks:
      - "{{ .VENV_BIN }}/black ."
      - "{{ .VENV_BIN }}/pylint ./rotini"
    dir: backend
    dotenv:
      - ../backend-test.env
  test:
    desc: "Run the test suites."
    deps: [bootstrap]
    cmd: . script/test
    dir: backend
    dotenv:
      - ../backend-test.env
  start:
    desc: "Starts the backend application."
    deps: [docker-build]
    cmd: docker run -d -p 8000:8000 --name {{ .APP_CONTAINER_NAME }} {{ .CLI_ARGS }} --add-host docker.host.internal:host-gateway rotini:dev
    cmd: docker run -d -p 8000:8000 --name {{ .APP_CONTAINER_NAME }} {{ .CLI_ARGS }} --add-host docker.host.internal:host-gateway --env-file ../../backend.env rotini:dev
    dir: backend/rotini
  stop:
    desc: "Stops the backend application."
backend-test.env (new file) | 2

@@ -0,0 +1,2 @@
DJANGO_SECRET_KEY="notakey"
JWT_SIGNING_SECRET="notasecret"
@ -1,638 +0,0 @@
|
|||
[MAIN]
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
|
||||
# in a server-like mode.
|
||||
clear-cache-post-run=no
|
||||
|
||||
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||
# all available extensions.
|
||||
#enable-all-extensions=
|
||||
|
||||
# In error mode, messages with a category besides ERROR or FATAL are
|
||||
# suppressed, and no reports are done by default. Error mode is compatible with
|
||||
# disabling specific errors.
|
||||
#errors-only=
|
||||
|
||||
# Always return a 0 (non-error) status code, even if lint errors are found.
|
||||
# This is primarily useful in continuous integration scripts.
|
||||
#exit-zero=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-allow-list=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
|
||||
# for backward compatibility.)
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Return non-zero exit code if any of these messages/categories are detected,
|
||||
# even if score is above --fail-under value. Syntax same as enable. Messages
|
||||
# specified are enabled, while categories only check already-enabled messages.
|
||||
fail-on=
|
||||
|
||||
# Specify a score threshold under which the program will exit with error.
|
||||
fail-under=10
|
||||
|
||||
# Interpret the stdin as a python script, whose filename needs to be passed as
|
||||
# the module_or_package argument.
|
||||
#from-stdin=
|
||||
|
||||
# Files or directories to be skipped. They should be base names, not paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regular expressions patterns to the
|
||||
# ignore-list. The regex matches against paths and can be in Posix or Windows
|
||||
# format. Because '\\' represents the directory delimiter on Windows systems,
|
||||
# it can't be used as an escape character.
|
||||
ignore-paths=^\/.venv
|
||||
|
||||
# Files or directories matching the regular expression patterns are skipped.
|
||||
# The regex matches against base names, not paths. The default value ignores
|
||||
# Emacs file locks
|
||||
ignore-patterns=^\.#
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use, and will cap the count on Windows to
|
||||
# avoid hangs.
|
||||
jobs=1
|
||||
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
# object. This can help the performance when dealing with large functions or
|
||||
# complex, nested conditions.
|
||||
limit-inference-results=100
|
||||
|
||||
# List of plugins (as comma separated values of python module names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Minimum Python version to use for version dependent checks. Will default to
|
||||
# the version used to run pylint.
|
||||
py-version=3.10
|
||||
|
||||
# Discover python modules and packages in the file system subtree.
|
||||
recursive=no
|
||||
|
||||
# Add paths to the list of the source roots. Supports globbing patterns. The
|
||||
# source root is an absolute path or a path relative to the current working
|
||||
# directory used to determine a package namespace for modules located under the
|
||||
# source root.
|
||||
source-roots=rotini
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
# In verbose mode, extra non-checker-related info will be displayed.
|
||||
#verbose=
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming style matching correct argument names.
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma.
|
||||
bad-names=foo,
|
||||
bar,
|
||||
baz,
|
||||
toto,
|
||||
tutu,
|
||||
tata
|
||||
|
||||
# Bad variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be refused
|
||||
bad-names-rgxs=
|
||||
|
||||
# Naming style matching correct class attribute names.
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming style matching correct function names.
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=i,
|
||||
j,
|
||||
k,
|
||||
ex,
|
||||
Run,
|
||||
_
|
||||
|
||||
# Good variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be accepted
|
||||
good-names-rgxs=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name.
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming style matching correct inline iteration names.
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type alias names. If left empty, type
|
||||
# alias names will be checked with the set naming style.
|
||||
#typealias-rgx=
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
asyncSetUp,
|
||||
__post_init__
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# List of regular expressions of class ancestor names to ignore when counting
|
||||
# public methods (see R0903)
|
||||
exclude-too-few-public-methods=
|
||||
|
||||
# List of qualified class names to ignore when counting class parents (see
|
||||
# R0901)
|
||||
ignored-parents=
|
||||
|
||||
# Maximum number of arguments for function / method.
|
||||
max-args=5
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
|
||||
# Maximum number of boolean expressions in an if statement (see R0916).
|
||||
max-bool-expr=5
|
||||
|
||||
# Maximum number of branch for function / method body.
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of locals for function / method body.
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
# Maximum number of return / yield for function / method body.
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of statements in function / method body.
|
||||
max-statements=50
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when caught.
|
||||
overgeneral-exceptions=builtins.BaseException,builtins.Exception
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1000
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow explicit reexports by alias from a package __init__.
|
||||
allow-reexport-from-package=no
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of external dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||
# external) dependencies to the given file (report RP0402 must not be
|
||||
# disabled).
|
||||
import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||
# UNDEFINED.
|
||||
confidence=HIGH,
|
||||
CONTROL_FLOW,
|
||||
INFERENCE,
|
||||
INFERENCE_FAILURE,
|
||||
UNDEFINED
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then re-enable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
invalid-name,
|
||||
missing-function-docstring,
|
||||
missing-module-docstring,
|
||||
too-many-locals,
|
||||
line-too-long,
|
||||
too-few-public-methods,
|
||||
fixme
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[METHOD_ARGS]
|
||||
|
||||
# List of qualified names (i.e., library.method) which require a timeout
|
||||
# parameter e.g. 'requests.api.get,requests.api.post'
|
||||
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
notes-rgx=
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit,argparse.parse_error
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
|
||||
# 'convention', and 'info' which contain the number of messages in each
|
||||
# category, as well as 'statement' which is the total number of statements
|
||||
# analyzed. This score is used by the global evaluation report (RP0004).
|
||||
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
#output-format=
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Comments are removed from the similarity computation
|
||||
ignore-comments=yes
|
||||
|
||||
# Docstrings are removed from the similarity computation
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Imports are removed from the similarity computation
|
||||
ignore-imports=yes
|
||||
|
||||
# Signatures are removed from the similarity computation
|
||||
ignore-signatures=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. No available dictionaries : You need to install
|
||||
# both the python package and the system dependency for enchant to work..
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should be considered directives if they
|
||||
# appear at the beginning of a comment and should not be checked.
|
||||
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of symbolic message names to ignore for Mixin members.
|
||||
ignored-checks-for-mixins=no-member,
|
||||
not-async-context-manager,
|
||||
not-context-manager,
|
||||
attribute-defined-outside-init
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# Regex pattern to define which classes are considered mixins.
|
||||
mixin-class-rgx=.*[Mm]ixin
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of names allowed to shadow builtins
|
||||
allowed-redefined-builtins=
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
|
@@ -1,6 +1,4 @@
ARG PYTHON_VERSION

FROM python:$PYTHON_VERSION-slim
FROM python:3.11-slim

ENV DEBIAN_FRONTEND=noninteractive

@@ -21,4 +19,4 @@ COPY ./rotini ./rotini

WORKDIR ./rotini

CMD python3 -m uvicorn main:app --host 0.0.0.0
CMD python3 -m uvicorn base.asgi:application --host 0.0.0.0
@@ -3,14 +3,12 @@ name = "rotini"
version = "0.0.0"
requires-python = ">= 3.10"
dependencies = [
    "fastapi",
    "uvicorn[standard]",
    "python-multipart",
    "typing_extensions",
    "pydantic",
    "pyjwt",
    "argon2-cffi",
    "psycopg2",
    "django",
    "djangorestframework",
]

[project.optional-dependencies]

@@ -18,15 +16,46 @@ dev = [
    "anyio",
    "black",
    "pylint",
    "pylint_django",
    "pytest-django",
    "pytest",
    "httpx",
]

[tool.setuptools]
packages = ["rotini"]

[tool.pytest.ini_options]
DJANGO_SETTINGS_MODULE="base.settings"
pythonpath=[
    ".",
    "./rotini",
]
python_files=[
    "*_test.py"
]

[tool.pylint.'MASTER']
load-plugins="pylint_django"
django-settings-module="base.settings"

[tool.pylint.main]
ignore-paths = ["^\\\\.venv|^/.venv"]
ignore-patterns = ["^\\.#"]
py-version = "3.11"
source-roots = ["rotini"]
suggestion-mode = true

[tool.pylint.format]
max-line-length = 100

[tool.pylint."messages control"]
disable = ["missing-class-docstring", "too-many-ancestors", "raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "useless-suppression", "deprecated-pragma", "use-symbolic-message-instead", "invalid-name", "missing-function-docstring", "missing-module-docstring", "too-many-locals", "line-too-long", "too-few-public-methods", "fixme"]

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where it
# should appear only once). See also the "--disable" option for examples.
enable = ["c-extension-no-member"]

[tool.pylint.similarities]
min-similarity-lines = 10
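The `[tool.pytest.ini_options]` block above wires pytest to Django (`DJANGO_SETTINGS_MODULE=base.settings`) and only collects `*_test.py` modules. A minimal sketch of a test that fits those conventions; the file name and assertion are illustrative only and not part of this commit:

```python
# files_count_test.py -- illustrative only; name matches the "*_test.py" pattern above.
import pytest
import django.contrib.auth

AuthUser = django.contrib.auth.get_user_model()


@pytest.mark.django_db  # pytest-django provisions and rolls back the test database
def test_no_users_exist_by_default():
    assert AuthUser.objects.count() == 0
```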
|
@ -1,21 +1,17 @@
|
|||
annotated-types==0.5.0
|
||||
# via pydantic
|
||||
anyio==3.7.1
|
||||
# via
|
||||
# starlette
|
||||
# watchfiles
|
||||
argon2-cffi==23.1.0
|
||||
# via rotini (pyproject.toml)
|
||||
argon2-cffi-bindings==21.2.0
|
||||
# via argon2-cffi
|
||||
cffi==1.15.1
|
||||
# via argon2-cffi-bindings
|
||||
# via watchfiles
|
||||
asgiref==3.7.2
|
||||
# via django
|
||||
click==8.1.6
|
||||
# via uvicorn
|
||||
django==4.2.7
|
||||
# via
|
||||
# djangorestframework
|
||||
# rotini (pyproject.toml)
|
||||
djangorestframework==3.14.0
|
||||
# via rotini (pyproject.toml)
|
||||
exceptiongroup==1.1.2
|
||||
# via anyio
|
||||
fastapi==0.101.0
|
||||
# via rotini (pyproject.toml)
|
||||
h11==0.14.0
|
||||
# via uvicorn
|
||||
httptools==0.6.0
|
||||
|
@ -24,32 +20,23 @@ idna==3.4
|
|||
# via anyio
|
||||
psycopg2==2.9.7
|
||||
# via rotini (pyproject.toml)
|
||||
pycparser==2.21
|
||||
# via cffi
|
||||
pydantic==2.1.1
|
||||
# via
|
||||
# fastapi
|
||||
# rotini (pyproject.toml)
|
||||
pydantic-core==2.4.0
|
||||
# via pydantic
|
||||
pyjwt==2.8.0
|
||||
# via rotini (pyproject.toml)
|
||||
python-dotenv==1.0.0
|
||||
# via uvicorn
|
||||
python-multipart==0.0.6
|
||||
# via rotini (pyproject.toml)
|
||||
pytz==2023.3.post1
|
||||
# via djangorestframework
|
||||
pyyaml==6.0.1
|
||||
# via uvicorn
|
||||
sniffio==1.3.0
|
||||
# via anyio
|
||||
starlette==0.27.0
|
||||
# via fastapi
|
||||
sqlparse==0.4.4
|
||||
# via django
|
||||
typing-extensions==4.7.1
|
||||
# via
|
||||
# fastapi
|
||||
# pydantic
|
||||
# pydantic-core
|
||||
# rotini (pyproject.toml)
|
||||
# asgiref
|
||||
# uvicorn
|
||||
uvicorn[standard]==0.23.2
|
||||
# via rotini (pyproject.toml)
|
||||
|
|
|
@ -1,34 +1,16 @@
|
|||
annotated-types==0.5.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# pydantic
|
||||
anyio==3.7.1
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# httpcore
|
||||
# rotini (pyproject.toml)
|
||||
# starlette
|
||||
# watchfiles
|
||||
argon2-cffi==23.1.0
|
||||
asgiref==3.7.2
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# rotini (pyproject.toml)
|
||||
argon2-cffi-bindings==21.2.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# argon2-cffi
|
||||
# django
|
||||
astroid==2.15.6
|
||||
# via pylint
|
||||
black==23.7.0
|
||||
# via rotini (pyproject.toml)
|
||||
certifi==2023.7.22
|
||||
# via
|
||||
# httpcore
|
||||
# httpx
|
||||
cffi==1.15.1
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# argon2-cffi-bindings
|
||||
click==8.1.6
|
||||
# via
|
||||
# -c requirements.txt
|
||||
|
@ -36,33 +18,32 @@ click==8.1.6
|
|||
# uvicorn
|
||||
dill==0.3.7
|
||||
# via pylint
|
||||
django==4.2.7
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# djangorestframework
|
||||
# rotini (pyproject.toml)
|
||||
djangorestframework==3.14.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# rotini (pyproject.toml)
|
||||
exceptiongroup==1.1.2
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# anyio
|
||||
# pytest
|
||||
fastapi==0.101.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# rotini (pyproject.toml)
|
||||
h11==0.14.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# httpcore
|
||||
# uvicorn
|
||||
httpcore==0.17.3
|
||||
# via httpx
|
||||
httptools==0.6.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# uvicorn
|
||||
httpx==0.24.1
|
||||
# via rotini (pyproject.toml)
|
||||
idna==3.4
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# anyio
|
||||
# httpx
|
||||
iniconfig==2.0.0
|
||||
# via pytest
|
||||
isort==5.12.0
|
||||
|
@ -89,26 +70,24 @@ psycopg2==2.9.7
|
|||
# via
|
||||
# -c requirements.txt
|
||||
# rotini (pyproject.toml)
|
||||
pycparser==2.21
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# cffi
|
||||
pydantic==2.1.1
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# fastapi
|
||||
# rotini (pyproject.toml)
|
||||
pydantic-core==2.4.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# pydantic
|
||||
pyjwt==2.8.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# rotini (pyproject.toml)
|
||||
pylint==2.17.5
|
||||
# via
|
||||
# pylint-django
|
||||
# pylint-plugin-utils
|
||||
# rotini (pyproject.toml)
|
||||
pylint-django==2.5.5
|
||||
# via rotini (pyproject.toml)
|
||||
pylint-plugin-utils==0.8.2
|
||||
# via pylint-django
|
||||
pytest==7.4.0
|
||||
# via
|
||||
# pytest-django
|
||||
# rotini (pyproject.toml)
|
||||
pytest-django==4.7.0
|
||||
# via rotini (pyproject.toml)
|
||||
python-dotenv==1.0.0
|
||||
# via
|
||||
|
@ -118,6 +97,10 @@ python-multipart==0.0.6
|
|||
# via
|
||||
# -c requirements.txt
|
||||
# rotini (pyproject.toml)
|
||||
pytz==2023.3.post1
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# djangorestframework
|
||||
pyyaml==6.0.1
|
||||
# via
|
||||
# -c requirements.txt
|
||||
|
@ -126,12 +109,10 @@ sniffio==1.3.0
|
|||
# via
|
||||
# -c requirements.txt
|
||||
# anyio
|
||||
# httpcore
|
||||
# httpx
|
||||
starlette==0.27.0
|
||||
sqlparse==0.4.4
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# fastapi
|
||||
# django
|
||||
tomli==2.0.1
|
||||
# via
|
||||
# black
|
||||
|
@ -142,11 +123,8 @@ tomlkit==0.12.1
|
|||
typing-extensions==4.7.1
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# asgiref
|
||||
# astroid
|
||||
# fastapi
|
||||
# pydantic
|
||||
# pydantic-core
|
||||
# rotini (pyproject.toml)
|
||||
# uvicorn
|
||||
uvicorn[standard]==0.23.2
|
||||
# via
|
||||
|
|
backend/rotini/auth/apps.py (new file) | 6

@@ -0,0 +1,6 @@
from django.apps import AppConfig


class AuthConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "auth"
@@ -1,31 +0,0 @@
"""
Class declarations and constants for the auth module.
"""
import pydantic


class LoginRequestData(pydantic.BaseModel):
    """Payload for login requests"""

    username: str
    password: str


class CreateUserRequestData(pydantic.BaseModel):
    """Payload for user creation"""

    username: str
    password: str


class IdentityTokenData(pydantic.BaseModel):
    """Contents of an identity token"""

    exp: int
    user_id: int
    username: str
    token_id: str


class UsernameAlreadyExists(Exception):
    """Signals a unique constraint violation on username values"""
@@ -1,25 +0,0 @@
import functools

import fastapi


def requires_logged_in(func):
    """
    Returns a 401 if the request received does not specify a logged
    in user in its state.

    The state is added through auth.middleware functionality.

    Note that this requires the endpoint to be aware of the fastapi.Request
    keyword argument passed to it.
    """

    @functools.wraps(func)
    async def wrapper(request: fastapi.Request, *args, **kwargs):
        if not hasattr(request.state, "user"):
            raise fastapi.HTTPException(status_code=401)

        response = await func(request, *args, **kwargs)
        return response

    return wrapper
backend/rotini/auth/jwt.py (new file) | 37

@@ -0,0 +1,37 @@
import datetime
import uuid

import django.conf

import jwt


def generate_token_for_user(user_id: int) -> str:
    """
    Generates an identity token for a given user.
    """
    token_data = {
        "exp": (datetime.datetime.now() + datetime.timedelta(seconds=120)).timestamp(),
        "user_id": user_id,
        "username": "yolo",
        "token_id": str(uuid.uuid4()),
    }

    return jwt.encode(
        token_data, django.conf.settings.JWT_SIGNING_SECRET, algorithm="HS256"
    )


def decode_token(
    token: str,
):
    """
    Decodes the given token.

    This may raise if the token is expired or invalid.
    """
    token_data = jwt.decode(
        token, django.conf.settings.JWT_SIGNING_SECRET, algorithms=["HS256"]
    )

    return token_data
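The two helpers are symmetric: `generate_token_for_user` signs claims with `settings.JWT_SIGNING_SECRET`, and `decode_token` verifies them with the same secret. A round-trip sketch, assuming `DJANGO_SETTINGS_MODULE=base.settings` and the secrets from backend-test.env are exported:

```python
# Round-trip sketch for auth/jwt.py; assumes Django settings and the
# DJANGO_SECRET_KEY / JWT_SIGNING_SECRET environment variables are in place.
import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "base.settings")

import auth.jwt

token = auth.jwt.generate_token_for_user(user_id=42)
claims = auth.jwt.decode_token(token)  # raises jwt.exceptions.ExpiredSignatureError once the 120s lifetime passes

assert claims["user_id"] == 42
```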
@@ -1,42 +1,41 @@
"""
Authentication & authorization middleware logic.
"""
import logging

import jwt.exceptions
from fastapi import Request
from starlette.middleware.base import BaseHTTPMiddleware
import django.http
import django.contrib.auth

import auth.use_cases as auth_use_cases
import auth.jwt

logger = logging.getLogger(__name__)

AuthUser = django.contrib.auth.get_user_model()

class AuthenticationMiddleware(BaseHTTPMiddleware):

class JwtMiddleware:
    """
    Decodes Authorization headers if present on the request and sets
    identifying fields in the request state.

    This information is then leveraged by individual routes to determine
    authorization.
    Middleware that handles using credentials supplied via the authorization
    headers on requests to log users in seamlessly.
    """

    async def dispatch(self, request: Request, call_next):
        auth_header = request.headers.get("authorization")
        decoded_token = None
    def __init__(self, get_response):
        self.get_response = get_response

        if auth_header is not None:
            _, token = auth_header.split(" ")
    def __call__(self, request: django.http.HttpRequest) -> django.http.HttpResponse:
        authorization_header = request.META.get("HTTP_AUTHORIZATION")

        if authorization_header is not None:
            try:
                decoded_token = auth_use_cases.decode_token(token)
            except jwt.exceptions.ExpiredSignatureError as exc:
                logger.exception(exc)
                _, token = authorization_header.split(" ")
                decoded_token = auth.jwt.decode_token(token)

        if decoded_token is not None:
            logger.info(decoded_token)
            request.state.user = {
                "username": decoded_token["username"],
                "user_id": decoded_token["user_id"],
            }
            logger.info("Token: %s\nDecoded token: %s", token, decoded_token)

        return await call_next(request)
                user = AuthUser.objects.get(pk=decoded_token["user_id"])

                request.user = user
            except Exception as e:  # pylint: disable=broad-exception-caught
                logger.exception(
                    e, extra={"authorization_provided": authorization_header}
                )
                return django.http.HttpResponse(status=401)

        return self.get_response(request)
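The net effect of the rewrite: instead of stashing claims on `request.state`, the middleware resolves the bearer token to a real `AuthUser` row and attaches it as `request.user`, or short-circuits with a 401 when the header cannot be validated. A hypothetical downstream view relying on that contract might look like this (illustration only, not part of this commit):

```python
# Hypothetical view that relies on JwtMiddleware having populated request.user;
# illustration only, not included in this changeset.
import django.http


def whoami(request: django.http.HttpRequest) -> django.http.HttpResponse:
    if not getattr(request.user, "is_authenticated", False):
        return django.http.HttpResponse(status=401)

    return django.http.JsonResponse(
        {"id": request.user.id, "username": request.user.username}
    )
```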
backend/rotini/auth/middleware_test.py (new file) | 46

@@ -0,0 +1,46 @@
import pytest
import django.http
import django.contrib.auth
import auth.middleware
import auth.jwt

AuthUser = django.contrib.auth.get_user_model()


class HttpRequestWithUser(django.http.HttpRequest):
    """HttpRequest type after user is added by middleware."""

    user: AuthUser


@pytest.fixture(name="jwt_middleware")
def fixture_jwt_middleware():
    def _noop(_: django.http.HttpRequest):
        return django.http.HttpResponse()

    return auth.middleware.JwtMiddleware(_noop)


def test_middleware_returns_401_on_invalid_authorization_header(jwt_middleware):
    """If authorization headers are present but cannot be validated, 401."""
    mock_request = django.http.HttpRequest()

    mock_request.META["HTTP_AUTHORIZATION"] = "Bearer notatoken"
    response = jwt_middleware(mock_request)

    assert response.status_code == 401


def test_middleware_adds_user_to_request_in_if_valid_token(
    jwt_middleware, test_user_credentials
):
    """If authorization headers are present and contain a valid JWT, sets user on request."""
    mock_request = HttpRequestWithUser()
    test_user = AuthUser.objects.get(username=test_user_credentials["username"])
    token = auth.jwt.generate_token_for_user(test_user.id)
    mock_request.META["HTTP_AUTHORIZATION"] = f"Bearer {token}"

    response = jwt_middleware(mock_request)

    assert response.status_code != 401
    assert mock_request.user == test_user
@@ -1,68 +0,0 @@
from fastapi import APIRouter, HTTPException
from fastapi.responses import JSONResponse

from exceptions import DoesNotExist

import auth.use_cases as auth_use_cases
import auth.base as auth_base

router = APIRouter(prefix="/auth")


@router.post("/users/", status_code=201)
async def create_user(payload: auth_base.CreateUserRequestData):
    """
    POST /auth/users/

    {
        username: string
        password: string
    }

    201 { <UserData> }

    If the user is created successfully, the user object is returned.

    400 {}

    If the username already exists, or the password is not adequate,
    400 is returned.
    """
    try:
        user = auth_use_cases.create_new_user(
            username=payload.username, raw_password=payload.password
        )
    except auth_base.UsernameAlreadyExists as exc:
        raise HTTPException(status_code=400) from exc

    return user


@router.post("/sessions/")
async def log_in(payload: auth_base.LoginRequestData):
    """
    Attempts to log a user in.

    200 { <User> }

    If the supplied credentials are correct, the user is returned.

    401 {}

    If the credentials are incorrect, immediate failure.
    """

    try:
        user = auth_use_cases.get_user(username=payload.username)
    except DoesNotExist as exc:
        raise HTTPException(status_code=401) from exc

    if not auth_use_cases.validate_password_for_user(user["id"], payload.password):
        raise HTTPException(status_code=401)

    token = auth_use_cases.generate_token_for_user(user)

    return JSONResponse(
        content={"username": user["username"]},
        headers={"Authorization": f"Bearer {token}"},
    )
backend/rotini/auth/urls.py (new file) | 9

@@ -0,0 +1,9 @@
import django.urls
import auth.views

urlpatterns = [
    django.urls.path(
        "session/", auth.views.SessionListView.as_view(), name="auth-session-list"
    ),
    django.urls.path("user/", auth.views.UserListView.as_view(), name="auth-user-list"),
]
@@ -1,149 +0,0 @@
"""
User-related use cases.

Functions in this file are focused on users and passwords.
"""
import datetime
import uuid

import typing_extensions as typing
import argon2
import jwt

from db import get_connection
from exceptions import DoesNotExist
from settings import settings

import auth.base as auth_base

password_hasher = argon2.PasswordHasher()


class User(typing.TypedDict):
    """
    User representation.

    The password hash is never included in these records and should
    not leave the database.
    """

    id: int
    username: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    password_updated_at: datetime.datetime


def create_new_user(*, username: str, raw_password: str) -> User:
    """
    Creates a new user record given a username and password.

    The password is hashed and the hash is stored.

    If successful, returns a dictionary representing the user.
    """
    password_hash = password_hasher.hash(raw_password)

    with get_connection() as connection, connection.cursor() as cursor:
        try:
            cursor.execute(
                "INSERT INTO users (username, password_hash) VALUES (%s, %s) RETURNING id, username",
                (username, password_hash),
            )
            returned = cursor.fetchone()
        except Exception as exc:
            raise auth_base.UsernameAlreadyExists() from exc

        inserted_id = returned[0]
        created_username = returned[1]

    return User(
        id=inserted_id,
        username=created_username,
        created_at=datetime.datetime.now(),
        updated_at=datetime.datetime.now(),
        password_updated_at=datetime.datetime.now(),
    )


def get_user(
    *, username: str = None, user_id: int = None
) -> typing.Union[typing.NoReturn, User]:
    """
    Retrieves a user record, if one exists, for the given user.

    Querying can be done via username or user ID. The first one supplied, in this
    order, is used and any other values are ignored.
    """
    with get_connection() as connection, connection.cursor() as cursor:
        if username is not None:
            cursor.execute(
                "SELECT id, username, created_at, updated_at, password_updated_at FROM users WHERE username = %s;",
                (username,),
            )
        elif user_id is not None:
            cursor.execute(
                "SELECT id, username, created_at, updated_at, password_updated_at FROM users WHERE id = %s",
                (user_id,),
            )

        fetched = cursor.fetchone()

    if fetched is None:
        raise DoesNotExist()

    return User(
        id=fetched[0],
        username=fetched[1],
        created_at=fetched[2],
        updated_at=fetched[3],
        password_updated_at=fetched[4],
    )


def validate_password_for_user(user_id: int, raw_password: str) -> bool:
    """
    Validates whether a password is correct for the given user.

    Always returns a boolean representing whether it was a match or not.
    """
    try:
        with get_connection() as connection, connection.cursor() as cursor:
            cursor.execute("SELECT password_hash FROM users WHERE id = %s", (user_id,))
            fetched = cursor.fetchone()

            current_secret_hash = fetched[0]
            return password_hasher.verify(current_secret_hash, raw_password)
    except Exception:  # pylint: disable=broad-exception-caught
        return False


def generate_token_for_user(user: User) -> str:
    """
    Generates an identity token for a given user.
    """
    token_data: auth_base.IdentityTokenData = {
        "exp": (
            datetime.datetime.now() + datetime.timedelta(seconds=settings.JWT_LIFETIME)
        ).timestamp(),
        "user_id": user["id"],
        "username": user["username"],
        "token_id": str(uuid.uuid4()),
    }

    return jwt.encode(token_data, settings.JWT_SECRET_KEY, algorithm="HS256")


def decode_token(
    token: str,
) -> typing.Union[typing.NoReturn, auth_base.IdentityTokenData]:
    """
    Decodes the given token.

    This may raise if the token is expired or invalid.
    """
    token_data: auth_base.IdentityTokenData = jwt.decode(
        token, settings.JWT_SECRET_KEY, algorithms=["HS256"]
    )

    return token_data
backend/rotini/auth/view_test.py (new file) | 75

@@ -0,0 +1,75 @@
import auth.jwt

import pytest

import django.urls
import django.contrib.auth

AuthUser = django.contrib.auth.get_user_model()


@pytest.fixture(name="create_user_request")
def fixture_create_user_request(auth_client):
    def _create_user_request(username: str, password: str):
        return auth_client.post(
            django.urls.reverse("auth-user-list"),
            {"username": username, "password": password},
        )

    return _create_user_request


@pytest.fixture(name="login_request")
def fixture_login_request(auth_client):
    def _login_request(username: str, password: str):
        return auth_client.post(
            django.urls.reverse("auth-session-list"),
            {"username": username, "password": password},
        )

    return _login_request


def test_create_new_user_returns_created_resource_on_success(create_user_request):
    mock_uname = "user"
    mock_pwd = "password"

    response = create_user_request(mock_uname, mock_pwd)

    created_user = AuthUser.objects.all().last()

    expected = {"username": mock_uname, "id": created_user.id}

    assert response.status_code == 201
    assert response.json() == expected


def test_create_new_user_returns_400_on_nonunique_username(create_user_request):
    mock_uname = "user"
    mock_pwd = "password"

    first = create_user_request(mock_uname, mock_pwd)
    second = create_user_request(mock_uname, mock_pwd)

    assert first.status_code == 201
    assert second.status_code == 400


def test_user_login_returns_valid_token_on_success(create_user_request, login_request):
    mock_uname = "user"
    mock_pwd = "password"

    creation_response = create_user_request(mock_uname, mock_pwd)

    login_response = login_request(mock_uname, mock_pwd)

    assert login_response.status_code == 201

    response_data = login_response.json()
    create_user_data = creation_response.json()

    assert "token" in response_data

    decoded_token = auth.jwt.decode_token(response_data["token"])

    assert decoded_token["user_id"] == create_user_data["id"]
backend/rotini/auth/views.py (new file) | 84

@@ -0,0 +1,84 @@
import logging

import django.http
import django.contrib.auth
import rest_framework.views
import rest_framework.status

import auth.jwt

AuthUser = django.contrib.auth.get_user_model()

logger = logging.getLogger(__name__)


class SessionListView(rest_framework.views.APIView):
    """
    Views handling authenticated user sessions.
    """

    def post(self, request: django.http.HttpRequest) -> django.http.HttpResponse:
        """
        Handles signing in for existing users.

        If valid credentials are provided, a token is included in the
        response that can then be used to make authenticated requests.

        POST /auth/login/
        {
            "username": "testuser",
            "password": "password"
        }

        200: The token is included as part of response cookies.
        401: The credentials provided were incorrect.
        """
        credentials = {
            "username": request.data.get("username"),
            "password": request.data.get("password"),
        }

        user = django.contrib.auth.authenticate(**credentials)

        if user is not None:
            django.contrib.auth.login(request, user)

            token = auth.jwt.generate_token_for_user(user_id=user.id)
            return django.http.JsonResponse({"token": token}, status=201)

        return django.http.HttpResponse(status=401)


class UserListView(rest_framework.views.APIView):
    """
    Routes dealing with non-specific users (without IDs).
    """

    def post(self, request: django.http.HttpRequest) -> django.http.HttpResponse:
        """
        Allows the creation of new users.

        A username and password must be provided, the username must be unique across the system.
        """

        credentials = {
            "username": request.data.get("username"),
            "password": request.data.get("password"),
        }

        # TODO: Add tests for view.
        try:
            new_user = AuthUser.objects.create_user(
                credentials["username"], "", credentials["password"]
            )
            logger.info(
                "Created new user.",
                extra={"username": new_user.username, "id": new_user.id},
            )
        except Exception as e:  # pylint: disable=broad-exception-caught
            logger.exception(e)
            return django.http.HttpResponse(status=400)

        return django.http.JsonResponse(
            {"username": new_user.username, "id": new_user.id}, status=201
        )
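Combined with auth/urls.py, these views expose POST /auth/user/ (registration) and POST /auth/session/ (login, returning `{"token": ...}`). A client-side sketch using httpx (already a dev dependency); the host, port, and credentials below are assumptions based on the `task start` container mapping, not part of this commit:

```python
# Client sketch against the new endpoints; http://localhost:8000 is an assumption
# (the Docker task maps 8000:8000), and the credentials are placeholders.
import httpx

BASE_URL = "http://localhost:8000"

created = httpx.post(
    f"{BASE_URL}/auth/user/", json={"username": "demo", "password": "correct-horse-battery"}
)
assert created.status_code == 201

session = httpx.post(
    f"{BASE_URL}/auth/session/", json={"username": "demo", "password": "correct-horse-battery"}
)
token = session.json()["token"]

# Later requests can authenticate through JwtMiddleware:
authenticated_headers = {"Authorization": f"Bearer {token}"}
```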
backend/rotini/base/asgi.py (new file) | 16

@@ -0,0 +1,16 @@
"""
ASGI config for rotini2 project.

It exposes the ASGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/4.2/howto/deployment/asgi/
"""

import os

from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "base.settings")

application = get_asgi_application()
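This module is what the updated Dockerfile CMD points at (`base.asgi:application`). A rough local equivalent without Docker, reusing the existing uvicorn dependency, could be:

```python
# Local-development sketch mirroring the Dockerfile CMD; assumes the required
# environment variables (database settings, secrets) are exported beforehand.
import uvicorn

if __name__ == "__main__":
    uvicorn.run("base.asgi:application", host="0.0.0.0", port=8000)
```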
backend/rotini/base/env_settings/test.py (new file) | 16

@@ -0,0 +1,16 @@
"""
Settings overrides for test environments.
"""

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": "postgres",
        "USER": "postgres",
        "PASSWORD": "test",
        "HOST": "localhost",
        "PORT": "5431",
    }
}

USER_UPLOAD_ROOT = "/tmp"
backend/rotini/base/settings.py (new file, 117 lines)
@@ -0,0 +1,117 @@
# pylint: disable=wildcard-import,unused-wildcard-import
"""
Base settings for all environments.

These values can be overridden by base.env_settings.
"""
from pathlib import Path

import sys
import os

BASE_DIR = Path(__file__).resolve().parent.parent

SECRET_KEY = os.environ["DJANGO_SECRET_KEY"]
JWT_SIGNING_SECRET = os.environ["JWT_SIGNING_SECRET"]

DEBUG = True

ALLOWED_HOSTS = ["*"]


INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "rest_framework",
    "files",
]

MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "auth.middleware.JwtMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]

ROOT_URLCONF = "base.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]

WSGI_APPLICATION = "base.wsgi.application"

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": "postgres",
        "USER": "postgres",
        "PASSWORD": "test",
        "HOST": "docker.host.internal",
        "PORT": "5432",
    }
}

REST_FRAMEWORK = {
    "DEFAULT_RENDERER_CLASSES": [
        "rest_framework.renderers.JSONRenderer",
    ],
    "DEFAULT_AUTHENTICATION_CLASSES": [
        "rest_framework.authentication.BasicAuthentication",
        "rest_framework.authentication.SessionAuthentication",
    ],
}

AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]

LANGUAGE_CODE = "en-us"

TIME_ZONE = "UTC"

USE_I18N = True

USE_TZ = True

STATIC_URL = "static/"

DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"

USER_UPLOAD_ROOT = os.environ.get("ROTINI_UPLOAD_ROOT", "/tmp")

# Importing overrides for environment.

if "test" in sys.argv[0]:
    from base.env_settings.test import *
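The wildcard import at the end of settings.py is what lets modules under base.env_settings replace individual values without copying the whole file. As a minimal sketch of how another environment could hook into the same pattern, the module below is hypothetical: the commit only ships the test override, and the local.py name and host value here are assumptions for illustration.

# backend/rotini/base/env_settings/local.py (hypothetical illustration, not part of this commit)
"""
Settings overrides for local development.
"""

# Use the host-local database instead of the in-container hostname
# that base/settings.py defaults to.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": "postgres",
        "USER": "postgres",
        "PASSWORD": "test",
        "HOST": "localhost",
        "PORT": "5432",
    }
}

Wiring such a module in would only take an extra condition next to the existing `if "test" in sys.argv[0]` check at the bottom of base/settings.py.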
backend/rotini/base/urls.py (new file, 27 lines)
@@ -0,0 +1,27 @@
"""
URL configuration for rotini2 project.

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/4.2/topics/http/urls/
Examples:
Function views
    1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
    1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
import django.urls as django_urls

import files.urls as files_urls
import auth.urls

urlpatterns = [
    django_urls.path("admin/", admin.site.urls),
    django_urls.path("", django_urls.include(files_urls.urlpatterns)),
    django_urls.path("auth/", django_urls.include(auth.urls.urlpatterns)),
]
backend/rotini/base/wsgi.py (new file, 16 lines)
@@ -0,0 +1,16 @@
"""
WSGI config for rotini2 project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/4.2/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "base.settings")

application = get_wsgi_application()
backend/rotini/conftest.py (new file, 38 lines)
@@ -0,0 +1,38 @@
"""
Global fixtures
"""

import django.test as django_test
import pytest


@pytest.fixture
def anyio_backend():
    return "asyncio"


@pytest.fixture(name="test_user_credentials")
def fixture_test_user_creds():
    """
    Test user credentials.
    """
    return {"username": "testuser", "password": "testpassword"}


@pytest.fixture(name="test_user", autouse=True)
def fixture_create_test_user(django_user_model, test_user_credentials):
    django_user_model.objects.create_user(**test_user_credentials)


@pytest.fixture(name="no_auth_client")
def fixture_no_auth_client() -> django_test.Client:
    """HTTP client without any authentication"""
    return django_test.Client()


@pytest.fixture(name="auth_client")
def fixture_auth_client(test_user_credentials) -> django_test.Client:
    """Authenticated HTTP client."""
    client = django_test.Client()
    assert client.login(**test_user_credentials)
    return client
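conftest.py above gives every test module a ready-made authenticated client: test_user is created automatically, and auth_client logs in through Django's session backend before the test body runs. A minimal sketch of a test consuming these fixtures is shown below; it assumes the files-list route name registered later in this diff, and the test itself is illustrative rather than part of the commit.

# Illustrative only; not a file added by this commit.
import pytest
import django.urls as django_urls

pytestmark = pytest.mark.django_db


def test_files_list_requires_login(no_auth_client):
    # Unauthenticated requests are rejected by the files API.
    assert no_auth_client.get(django_urls.reverse("files-list")).status_code == 401


def test_files_list_is_empty_for_new_user(auth_client):
    # The autouse test_user fixture owns no files yet.
    response = auth_client.get(django_urls.reverse("files-list"))
    assert response.status_code == 200
    assert response.json() == []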
@@ -1,16 +0,0 @@
import psycopg2

from settings import settings


def get_connection():
    """
    Create a database connection.
    """
    return psycopg2.connect(
        user=settings.DATABASE_USERNAME,
        password=settings.DATABASE_PASSWORD,
        host=settings.DATABASE_HOST,
        port=settings.DATABASE_PORT,
        database=settings.DATABASE_NAME,
    )

@@ -1,5 +0,0 @@
DATABASE_USERNAME = "postgres"
DATABASE_PASSWORD = "test"
DATABASE_HOST = "localhost"
DATABASE_PORT = 5431
DATABASE_NAME = "postgres"

@@ -1,5 +0,0 @@
DATABASE_USERNAME = "postgres"
DATABASE_PASSWORD = "test"
DATABASE_HOST = "localhost"
DATABASE_PORT = 5432
DATABASE_NAME = "postgres"

@@ -1,3 +0,0 @@
from envs.local import *

DATABASE_HOST = "localhost"

@@ -1,9 +0,0 @@
import os

DATABASE_USERNAME = "postgres"
DATABASE_PASSWORD = "test"
DATABASE_HOST = "localhost"
DATABASE_PORT = 5431
DATABASE_NAME = "postgres"

STORAGE_ROOT = os.getenv("ROTINI_STORAGE_ROOT")

@@ -1,4 +0,0 @@
class DoesNotExist(Exception):
    """
    General purpose exception signalling a failure to find a database record.
    """
backend/rotini/files/apps.py (new file, 6 lines)
@@ -0,0 +1,6 @@
from django.apps import AppConfig


class FilesConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "files"

@@ -1,13 +0,0 @@
import typing_extensions as typing


class FileRecord(typing.TypedDict):
    """
    Database record associated with a file tracked
    by the system.
    """

    id: str
    size: int
    path: str
    filename: str
backend/rotini/files/migrations/0001_initial.py (new file, 29 lines)
@@ -0,0 +1,29 @@
# Generated by Django 4.2.7 on 2023-11-17 06:15

import uuid
from django.db import migrations, models


class Migration(migrations.Migration):
    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name="File",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("path", models.CharField(max_length=4096)),
                ("size", models.IntegerField()),
            ],
        ),
    ]
backend/rotini/files/migrations/0002_file_owner.py (new file, 24 lines)
@@ -0,0 +1,24 @@
# Generated by Django 4.2.7 on 2023-11-18 06:02

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("files", "0001_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="file",
            name="owner",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to=settings.AUTH_USER_MODEL,
            ),
        ),
    ]
backend/rotini/files/models.py (new file, 17 lines)
@@ -0,0 +1,17 @@
import uuid

from django.db import models
from django.conf import settings


class File(models.Model):
    """
    Represents a file tracked by the system.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    path = models.CharField(max_length=4096, null=False)
    size = models.IntegerField(null=False)
    owner = models.ForeignKey(
        settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True
    )
|
@ -1,133 +0,0 @@
|
|||
"""
|
||||
Files API.
|
||||
|
||||
This API allows users to create and query for existing data about
|
||||
files that live in the system.
|
||||
"""
|
||||
|
||||
import pathlib
|
||||
|
||||
from fastapi import APIRouter, HTTPException, UploadFile, Request
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
import files.use_cases as files_use_cases
|
||||
from settings import settings
|
||||
|
||||
router = APIRouter(prefix="/files")
|
||||
|
||||
|
||||
@router.get("/", status_code=200)
|
||||
async def list_files(request: Request):
|
||||
"""
|
||||
Fetches all files owned by the logged-in user.
|
||||
|
||||
200 { [<FileRecord>, ...] }
|
||||
|
||||
If the user is logged in, file records that they
|
||||
own are returned.
|
||||
|
||||
401 {}
|
||||
|
||||
If the request is not authenticated, it fails.
|
||||
"""
|
||||
# FIXME: Temporarily fetching files belonging to the base user.
|
||||
# to be resolved once users can log in.
|
||||
current_user_id = (
|
||||
request.state.user["user_id"] if hasattr(request.state, "user") else 1
|
||||
)
|
||||
return files_use_cases.get_all_files_owned_by_user(current_user_id)
|
||||
|
||||
|
||||
@router.post("/", status_code=201)
|
||||
async def upload_file(request: Request, file: UploadFile) -> files_use_cases.FileRecord:
|
||||
"""
|
||||
Receives files uploaded by the user, saving them to disk and
|
||||
recording their existence in the database.
|
||||
|
||||
201 { <FileRecord> }
|
||||
|
||||
The file was uploaded and registered successfully.
|
||||
"""
|
||||
|
||||
content = await file.read()
|
||||
size = len(content)
|
||||
dest_path = pathlib.Path(settings.STORAGE_ROOT, file.filename)
|
||||
|
||||
with open(dest_path, "wb") as f:
|
||||
f.write(content)
|
||||
# FIXME: Temporarily fetching files belonging to the base user.
|
||||
# to be resolved once users can log in.
|
||||
created_record = files_use_cases.create_file_record(
|
||||
str(dest_path),
|
||||
size,
|
||||
request.state.user["user_id"] if hasattr(request.state, "user") else 1,
|
||||
)
|
||||
|
||||
return created_record
|
||||
|
||||
|
||||
@router.get("/{file_id}/")
|
||||
def get_file_details(file_id: str):
|
||||
file = files_use_cases.get_file_record_by_id(file_id)
|
||||
|
||||
if file is None:
|
||||
raise HTTPException(status_code=404)
|
||||
|
||||
return file
|
||||
|
||||
|
||||
@router.get("/{file_id}/content/")
|
||||
def get_file_content(file_id: str) -> FileResponse:
|
||||
"""
|
||||
Retrieves the file data associated with a given File ID.
|
||||
|
||||
This returns the file for download as a streamed file.
|
||||
|
||||
GET /files/{file_id}/content/
|
||||
|
||||
200 { <File> }
|
||||
|
||||
The file data is returned as a stream if the file exists.
|
||||
|
||||
404 {}
|
||||
|
||||
The file ID did not map to anything.
|
||||
"""
|
||||
file = files_use_cases.get_file_record_by_id(file_id)
|
||||
|
||||
if file is None:
|
||||
raise HTTPException(status_code=404)
|
||||
|
||||
return FileResponse(
|
||||
path=file["path"],
|
||||
media_type="application/octet-stream",
|
||||
filename=file["filename"],
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/{file_id}/")
|
||||
def delete_file(file_id: str) -> files_use_cases.FileRecord:
|
||||
"""
|
||||
Deletes a file given its ID.
|
||||
|
||||
This will delete the file in the database records as well
|
||||
as on disk. The operation is not reversible.
|
||||
|
||||
DELETE /files/{file_id}/
|
||||
|
||||
200 { <FileRecord> }
|
||||
|
||||
The file exists and has been deleted from storage and
|
||||
from the database.
|
||||
|
||||
404 {}
|
||||
|
||||
The file ID did not map to anything.
|
||||
|
||||
"""
|
||||
try:
|
||||
file = files_use_cases.delete_file_record_by_id(file_id)
|
||||
except files_use_cases.DoesNotExist as exc:
|
||||
raise HTTPException(status_code=404) from exc
|
||||
|
||||
return file
|
backend/rotini/files/serializers.py (new file, 39 lines)
@@ -0,0 +1,39 @@
import typing
import pathlib

import rest_framework.serializers as drf_serializers

import files.models as files_models


class FileDict(typing.TypedDict):
    id: str
    path: str
    size: int
    filename: str
    owner_id: int


class FileSerializer(drf_serializers.ModelSerializer):
    def validate_path(self, value: str) -> typing.Union[typing.NoReturn, str]:
        if not value:
            raise drf_serializers.ValidationError("Path must not be empty.")
        return value

    def validate_owner(self, value: int) -> typing.Union[typing.NoReturn, int]:
        if not value:
            raise drf_serializers.ValidationError("File must have an owner.")
        return value

    def to_representation(self, instance: files_models.File) -> FileDict:
        return {
            "id": instance.id,
            "path": instance.path,
            "size": instance.size,
            "owner_id": instance.owner.id,
            "filename": pathlib.Path(instance.path).name,
        }

    class Meta:
        model = files_models.File
        fields = "__all__"
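FileSerializer controls both validation on the way in and the JSON shape on the way out; to_representation is what turns the stored path into the filename field that API clients see. The sketch below shows that round trip from a Django shell or test with a migrated database available; the concrete path and size values are made up for illustration and are not part of the diff.

# Illustrative only; assumes a database is available.
from django.contrib.auth.models import User

import files.models as files_models
import files.serializers as files_serializers

owner = User.objects.create_user("demo", password="demo")

# Validate incoming data the same way FileViewSet.create does.
incoming = files_serializers.FileSerializer(
    data={"path": "/tmp/report.pdf", "size": 2048, "owner": owner.id}
)
assert incoming.is_valid()

# Serializing a saved instance yields the API representation, including
# the derived "filename" key produced by to_representation().
stored = files_models.File.objects.create(path="/tmp/report.pdf", size=2048, owner=owner)
data = files_serializers.FileSerializer(stored).data
assert data["filename"] == "report.pdf"
assert data["owner_id"] == owner.id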
backend/rotini/files/urls.py (new file, 15 lines)
@@ -0,0 +1,15 @@
import django.urls as dj_urls
import rest_framework.routers as drf_routers

import files.views as file_views

router = drf_routers.DefaultRouter()
router.register("files", file_views.FileViewSet, basename="files")

urlpatterns = router.urls + [
    dj_urls.path(
        "files/<str:file_id>/content/",
        file_views.FileDataView.as_view(),
        name="files-detail-data",
    ),
]
|
@ -1,119 +0,0 @@
|
|||
"""
|
||||
File-related use cases.
|
||||
|
||||
Use cases and data structures defined in this file
|
||||
manipulate file records in the database or represent them
|
||||
after they have been read.
|
||||
"""
|
||||
import pathlib
|
||||
|
||||
import typing_extensions as typing
|
||||
|
||||
from db import get_connection
|
||||
from settings import settings
|
||||
|
||||
from permissions.base import Permissions
|
||||
from permissions.files import set_file_permission
|
||||
|
||||
from exceptions import DoesNotExist
|
||||
|
||||
from files.base import FileRecord
|
||||
|
||||
|
||||
def create_file_record(path: str, size: int, owner_id: int) -> FileRecord:
|
||||
"""
|
||||
Creates a record representing an uploaded file in the database.
|
||||
|
||||
The record itself does not ensure that the file exists on disk, but just
|
||||
that it's tracked by the system.
|
||||
"""
|
||||
inserted_id = None
|
||||
|
||||
with get_connection() as connection, connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"INSERT INTO files (path, size) VALUES (%s, %s) RETURNING id", (path, size)
|
||||
)
|
||||
|
||||
inserted_id = cursor.fetchone()[0]
|
||||
|
||||
set_file_permission(inserted_id, owner_id, list(Permissions))
|
||||
|
||||
filename = pathlib.Path(path).name
|
||||
|
||||
return FileRecord(id=inserted_id, size=size, path=path, filename=filename)
|
||||
|
||||
|
||||
def get_all_files_owned_by_user(user_id: int) -> typing.Tuple[FileRecord]:
|
||||
"""
|
||||
Gets all the file records owned by the user.
|
||||
|
||||
A file is considered owned if the user has all permissions on a given file. There
|
||||
can be more than one owner to a file, but all files must have an owner.
|
||||
"""
|
||||
rows = None
|
||||
|
||||
with get_connection() as connection, connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""SELECT
|
||||
f.*
|
||||
from files f
|
||||
join permissions_files pf
|
||||
on f.id = pf.file_id
|
||||
where
|
||||
pf.user_id = %s
|
||||
and pf.value = %s;""",
|
||||
(user_id, sum(p.value for p in Permissions)),
|
||||
)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
if rows is None:
|
||||
raise RuntimeError("Failed to get files.")
|
||||
|
||||
return (
|
||||
FileRecord(
|
||||
id=row[0], path=row[1], size=row[2], filename=pathlib.Path(row[1]).name
|
||||
)
|
||||
for row in rows
|
||||
)
|
||||
|
||||
|
||||
def get_file_record_by_id(file_id: str) -> typing.Optional[FileRecord]:
|
||||
"""
|
||||
Fetches a single file by ID.
|
||||
|
||||
If the ID doesn't correspond to a record, None is returned.
|
||||
"""
|
||||
|
||||
row = None
|
||||
with get_connection() as connection, connection.cursor() as cursor:
|
||||
cursor.execute("SELECT * FROM files WHERE id=%s;", (file_id,))
|
||||
row = cursor.fetchone()
|
||||
|
||||
if row is None:
|
||||
return None
|
||||
|
||||
return FileRecord(
|
||||
id=row[0], path=row[1], size=row[2], filename=pathlib.Path(row[1]).name
|
||||
)
|
||||
|
||||
|
||||
def delete_file_record_by_id(file_id: str) -> typing.Union[typing.NoReturn, FileRecord]:
|
||||
"""
|
||||
Deletes a single file by ID, including its presence in storage.
|
||||
|
||||
If the ID doesn't correspond to a record, DoesNotExist is raised.
|
||||
"""
|
||||
|
||||
row = None
|
||||
with get_connection() as connection, connection.cursor() as cursor:
|
||||
cursor.execute("DELETE FROM files WHERE id=%s RETURNING *;", (file_id,))
|
||||
row = cursor.fetchone()
|
||||
|
||||
if row is None:
|
||||
raise DoesNotExist()
|
||||
|
||||
pathlib.Path(pathlib.Path(settings.STORAGE_ROOT, row[1])).unlink()
|
||||
|
||||
return FileRecord(
|
||||
id=row[0], path=row[1], size=row[2], filename=pathlib.Path(row[1]).name
|
||||
)
|
backend/rotini/files/views.py (new file, 128 lines)
@@ -0,0 +1,128 @@
import pathlib

import django.http as django_http
import django.conf as django_conf
import rest_framework.viewsets as drf_viewsets
import rest_framework.status as drf_status
import rest_framework.views as drf_views
import rest_framework.permissions as drf_permissions

import files.serializers as files_serializers
import files.models as files_models


class FileViewSet(drf_viewsets.ModelViewSet):
    """
    File retrieval and manipulation

    GET /file/

    200 OK { [FileSerializerData] }

    On success, returns all the files owned by the logged-in
    user.

    GET /file/{file_id}/

    200 OK { FileSerializerData }

    On success, returns a single file's metadata by ID. Note that
    this does not provide the file data, which can be fetched via
    /file/{file_id}/content/.

    DELETE /file/{file_id}/

    204 NO CONTENT {}

    Deletes an owned file.

    PUT /file/{file_id}/ { FileMetadata }

    200 OK {}

    Mutates the file metadata for the given file. The underlying
    resource on disk stays the same.
    """

    queryset = files_models.File.objects.all()
    serializer_class = files_serializers.FileSerializer

    permission_classes = [drf_permissions.IsAuthenticated]

    def get_queryset(self):
        return self.queryset.filter(owner_id=self.request.user.id)

    def create(
        self, request: django_http.HttpRequest, *args, **kwargs
    ) -> django_http.JsonResponse:
        """
        Handles the upload and metadata records for a new file.
        """
        file_received = request.FILES.get("file")

        if not file_received:
            return django_http.HttpResponseBadRequest()

        content = request.FILES.get("file").read()
        size = len(content)
        dest_path = pathlib.Path(
            django_conf.settings.USER_UPLOAD_ROOT, request.FILES.get("file").name
        )

        file = self.get_serializer_class()(
            data={"path": str(dest_path), "size": size, "owner": request.user.id}
        )

        with open(dest_path, "wb") as f:
            f.write(content)

        if file.is_valid(raise_exception=True):
            file.save()

        return django_http.JsonResponse(file.data, status=drf_status.HTTP_201_CREATED)

    def destroy(
        self, request: django_http.HttpRequest, *args, **kwargs
    ) -> django_http.HttpResponse:
        pk = kwargs["pk"]
        file_selected = self.queryset.filter(pk=pk).first()

        if file_selected is None:
            return django_http.HttpResponseNotFound()

        pathlib.Path(file_selected.path).unlink()

        file_selected.delete()

        return django_http.HttpResponse(status=drf_status.HTTP_204_NO_CONTENT)


class FileDataView(drf_views.APIView):
    """File downloads"""

    queryset = files_models.File.objects.all()
    permission_classes = [drf_permissions.IsAuthenticated]

    def get_queryset(self):
        return self.queryset.filter(owner_id=self.request.user.id)

    def get(self, _, file_id: str) -> django_http.HttpResponse:
        """
        Retrieves and serves the given file, by ID.

        The file must be owned by the logged-in user, else 404.
        """

        file = self.get_queryset().filter(id=file_id).first()

        if file is None:
            return django_http.HttpResponseNotFound()

        with open(
            pathlib.Path(django_conf.settings.USER_UPLOAD_ROOT, file.path), "rb"
        ) as f:
            return django_http.HttpResponse(
                f.read(),
                headers={"Content-Disposition": f'attachment; filename="{file.path}"'},
                content_type="application/octet-stream",
            )
168
backend/rotini/files/views_test.py
Normal file
168
backend/rotini/files/views_test.py
Normal file
|
@ -0,0 +1,168 @@
|
|||
import pathlib
|
||||
|
||||
import rest_framework.status as drf_status
|
||||
import django.urls as django_urls
|
||||
import pytest
|
||||
|
||||
pytestmark = [pytest.mark.anyio, pytest.mark.django_db]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"route_name,route_params",
|
||||
[
|
||||
("files-detail-data", {"file_id": "abc"}),
|
||||
("files-list", None),
|
||||
("files-detail", {"pk": "abc"}),
|
||||
],
|
||||
ids=["details-data", "list", "details"],
|
||||
)
|
||||
def test_files_views_return_401_if_unauthenticated(
|
||||
no_auth_client, route_name, route_params
|
||||
):
|
||||
"""The files API requires authentication."""
|
||||
response = no_auth_client.get(django_urls.reverse(route_name, kwargs=route_params))
|
||||
assert response.status_code == drf_status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_file_downloads_404_if_does_not_exist(auth_client):
|
||||
"""Attempting to download a file that doesn't exist yields 404 for authenticated users."""
|
||||
non_existent_id = "06f02980-864d-4832-a894-2e9d2543a79a"
|
||||
response = auth_client.get(
|
||||
django_urls.reverse("files-detail-data", kwargs={"file_id": non_existent_id})
|
||||
)
|
||||
|
||||
assert response.status_code == drf_status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_file_deletion_returns_404_if_does_not_exist(auth_client):
|
||||
non_existent_id = "06f02980-864d-4832-a894-2e9d2543a79a"
|
||||
response = auth_client.delete(
|
||||
django_urls.reverse("files-detail", kwargs={"pk": non_existent_id})
|
||||
)
|
||||
|
||||
assert response.status_code == drf_status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_file_detail_returns_404_if_does_not_exist(auth_client):
|
||||
non_existent_id = "06f02980-864d-4832-a894-2e9d2543a79a"
|
||||
response = auth_client.get(
|
||||
django_urls.reverse("files-detail", kwargs={"pk": non_existent_id})
|
||||
)
|
||||
|
||||
assert response.status_code == drf_status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_list_files_returns_registered_files_and_200(auth_client, tmp_path):
|
||||
mock_file_1 = tmp_path / "test1.txt"
|
||||
mock_file_1.write_text("testtest")
|
||||
|
||||
with open(str(mock_file_1), "rb") as mock_file_stream:
|
||||
response = auth_client.post(
|
||||
django_urls.reverse("files-list"), {"file": mock_file_stream}
|
||||
)
|
||||
|
||||
mock_file_1_data = response.json()
|
||||
|
||||
mock_file_2 = tmp_path / "test2.txt"
|
||||
mock_file_2.write_text("testtest")
|
||||
|
||||
with open(str(mock_file_2), "rb") as mock_file_stream:
|
||||
response = auth_client.post(
|
||||
django_urls.reverse("files-list"), {"file": mock_file_stream}
|
||||
)
|
||||
|
||||
mock_file_2_data = response.json()
|
||||
|
||||
response = auth_client.get("/files/")
|
||||
|
||||
assert response.status_code == drf_status.HTTP_200_OK
|
||||
assert response.json() == [mock_file_1_data, mock_file_2_data]
|
||||
|
||||
|
||||
def test_file_details_returns_specified_file_and_200(auth_client, tmp_path):
|
||||
mock_file = tmp_path / "test.txt"
|
||||
mock_file.write_text("testtest")
|
||||
|
||||
with open(str(mock_file), "rb") as mock_file_stream:
|
||||
response = auth_client.post(
|
||||
django_urls.reverse("files-list"), {"file": mock_file_stream}
|
||||
)
|
||||
|
||||
response_data = response.json()
|
||||
created_file_id = response_data["id"]
|
||||
|
||||
response = auth_client.get(
|
||||
django_urls.reverse("files-detail", kwargs={"pk": created_file_id})
|
||||
)
|
||||
|
||||
assert response.status_code == drf_status.HTTP_200_OK
|
||||
assert response.json() == response_data
|
||||
|
||||
|
||||
def test_file_deletion_deletes_record_and_file(auth_client, tmp_path):
|
||||
mock_file = tmp_path / "test.txt"
|
||||
mock_file.write_text("testtest")
|
||||
|
||||
with open(str(mock_file), "rb") as mock_file_stream:
|
||||
response = auth_client.post(
|
||||
django_urls.reverse("files-list"), {"file": mock_file_stream}
|
||||
)
|
||||
|
||||
response_data = response.json()
|
||||
file_id = response_data["id"]
|
||||
file_path = response_data["path"]
|
||||
|
||||
assert pathlib.Path(file_path).exists()
|
||||
response = auth_client.get(
|
||||
django_urls.reverse("files-detail", kwargs={"pk": file_id})
|
||||
)
|
||||
|
||||
assert response.status_code == drf_status.HTTP_200_OK
|
||||
|
||||
auth_client.delete(django_urls.reverse("files-detail", kwargs={"pk": file_id}))
|
||||
assert not pathlib.Path(file_path).exists()
|
||||
|
||||
response = auth_client.get(
|
||||
django_urls.reverse("files-detail", kwargs={"pk": file_id})
|
||||
)
|
||||
|
||||
assert response.status_code == drf_status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_file_deletion_200_and_return_deleted_resource(auth_client, tmp_path):
|
||||
mock_file = tmp_path / "test.txt"
|
||||
mock_file.write_text("testtest")
|
||||
|
||||
with open(str(mock_file), "rb") as mock_file_stream:
|
||||
response = auth_client.post(
|
||||
django_urls.reverse("files-list"), {"file": mock_file_stream}
|
||||
)
|
||||
|
||||
response_data = response.json()
|
||||
file_id = response_data["id"]
|
||||
|
||||
response = auth_client.delete(
|
||||
django_urls.reverse("files-detail", kwargs={"pk": file_id})
|
||||
)
|
||||
|
||||
assert response.status_code == drf_status.HTTP_204_NO_CONTENT
|
||||
|
||||
|
||||
def test_file_downloads_200_and_return_file(auth_client, tmp_path):
|
||||
mock_file = tmp_path / "test.txt"
|
||||
mock_file.write_text("testtest")
|
||||
|
||||
with open(str(mock_file), "rb") as mock_file_stream:
|
||||
response = auth_client.post(
|
||||
django_urls.reverse("files-list"), {"file": mock_file_stream}
|
||||
)
|
||||
|
||||
response_data = response.json()
|
||||
file_id = response_data["id"]
|
||||
|
||||
response = auth_client.get(
|
||||
django_urls.reverse("files-detail-data", kwargs={"file_id": file_id})
|
||||
)
|
||||
|
||||
assert response.status_code == drf_status.HTTP_200_OK
|
||||
assert response.content.decode("utf8") == mock_file.read_text()
|
@@ -1,34 +0,0 @@
"""
Rotini: a self-hosted cloud storage & productivity app.
"""

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

import auth.middleware as auth_middleware
import auth.routes as auth_routes

import files.routes as files_routes

app = FastAPI()

origins = ["http://localhost:1234"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
app.add_middleware(auth_middleware.AuthenticationMiddleware)

routers = [files_routes.router, auth_routes.router]

for router in routers:
    app.include_router(router)


@app.get("/", status_code=204)
def healthcheck():
    pass
backend/rotini/manage.py (new executable file, 25 lines)
@@ -0,0 +1,25 @@
#!/usr/bin/env python
# pylint: disable=import-outside-toplevel
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    """Run administrative tasks."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "base.settings")
    try:
        from django.core.management import (
            execute_from_command_line,
        )
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()
|
@ -1,256 +0,0 @@
|
|||
"""
|
||||
Migration handler.
|
||||
|
||||
This module handles database migrations.
|
||||
|
||||
Migrations are expected to be Python files of the format:
|
||||
|
||||
```
|
||||
UID = <UUID, current migration>
|
||||
|
||||
PARENT = <UUID, migration that must be applied before present>
|
||||
|
||||
MESSAGE = <str, message>
|
||||
|
||||
UP_SQL = <SQL>
|
||||
|
||||
DOWN_SQL = <SQL>
|
||||
```
|
||||
|
||||
where UP_SQL is the change the migration represents and DOWN_SQL its inverse.
|
||||
|
||||
Usage:
|
||||
|
||||
python migrate.py <up|down|new> [<migration_name, if new>]
|
||||
|
||||
Not including a migration name executes everything from the last executed
|
||||
migration.
|
||||
"""
|
||||
|
||||
import collections
|
||||
import pathlib
|
||||
import datetime
|
||||
import uuid
|
||||
import typing
|
||||
import importlib
|
||||
import sys
|
||||
|
||||
import psycopg2
|
||||
|
||||
from settings import settings
|
||||
|
||||
VALID_COMMANDS = ["up", "down", "new"]
|
||||
|
||||
DIRECTION_UP = 1
|
||||
DIRECTION_DOWN = -1
|
||||
|
||||
# UUID attached to a migration.
|
||||
MigrationID = str
|
||||
|
||||
# Filename (without ext.) of a migration.
|
||||
MigrationModuleName = str
|
||||
|
||||
MigrationItem = collections.namedtuple("MigrationItem", "id module")
|
||||
|
||||
|
||||
def _get_connection():
|
||||
"""
|
||||
Create a database connection.
|
||||
"""
|
||||
return psycopg2.connect(
|
||||
user=settings.DATABASE_USERNAME,
|
||||
password=settings.DATABASE_PASSWORD,
|
||||
host=settings.DATABASE_HOST,
|
||||
port=settings.DATABASE_PORT,
|
||||
database=settings.DATABASE_NAME,
|
||||
)
|
||||
|
||||
|
||||
def _ensure_migration_table():
|
||||
"""
|
||||
Ensure that the migration tracking table exists.
|
||||
"""
|
||||
connection = _get_connection()
|
||||
|
||||
maybe_create_sql = """
|
||||
CREATE TABLE IF NOT EXISTS migrations_lastapplied (
|
||||
migration_uid text NOT NULL
|
||||
);
|
||||
"""
|
||||
|
||||
with connection:
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(maybe_create_sql)
|
||||
|
||||
|
||||
def _get_migration_sequence() -> typing.List[MigrationItem]:
|
||||
"""
|
||||
Collects migration files and builds a historical
|
||||
timeline.
|
||||
|
||||
This will detect duplicates and breaks in the sequence
|
||||
and raise if the history is not linear and complete.
|
||||
"""
|
||||
migrations_dir = pathlib.Path(__file__).parent
|
||||
migrations: typing.Dict[MigrationID, MigrationModuleName] = {}
|
||||
dependency_map: typing.Dict[MigrationID, MigrationID] = {}
|
||||
|
||||
for file in migrations_dir.iterdir():
|
||||
if file.name.startswith("migration_") and file.suffix == ".py":
|
||||
migration = importlib.import_module(file.stem)
|
||||
migration_id = migration.UID
|
||||
migration_parent = migration.PARENT
|
||||
|
||||
if migration_id in migrations:
|
||||
raise RuntimeError("Duplicate migrations.")
|
||||
|
||||
if migration_parent in dependency_map:
|
||||
raise RuntimeError("History must be linear.")
|
||||
|
||||
migrations[migration_id] = str(file.stem)
|
||||
dependency_map[migration_parent] = migration_id
|
||||
|
||||
if not dependency_map:
|
||||
print("No migrations yet!")
|
||||
return []
|
||||
|
||||
root_id = dependency_map["None"]
|
||||
history: typing.List[MigrationItem] = [MigrationItem(root_id, migrations[root_id])]
|
||||
|
||||
while history:
|
||||
next_id = dependency_map.get(history[-1].id)
|
||||
|
||||
if next_id is None:
|
||||
break
|
||||
|
||||
history.append(MigrationItem(next_id, migrations[next_id]))
|
||||
|
||||
return history
|
||||
|
||||
|
||||
def migrate(direction: typing.Union[typing.Literal[1], typing.Literal[-1]]):
|
||||
"""
|
||||
Runs a migration (expected to be in the current directory
|
||||
and labeled 'migration_<label>.py'.
|
||||
"""
|
||||
_ensure_migration_table()
|
||||
|
||||
connection = _get_connection()
|
||||
full_history, applied_migrations = _get_migration_sequence(), []
|
||||
last_applied = None
|
||||
|
||||
with connection, connection.cursor() as cursor:
|
||||
cursor.execute('SELECT migration_uid FROM "migrations_lastapplied"')
|
||||
last_applied_row = cursor.fetchone()
|
||||
last_applied = last_applied_row[0] if last_applied_row else None
|
||||
|
||||
full_history_ids = [migration.id for migration in full_history]
|
||||
|
||||
if last_applied is not None and last_applied not in full_history_ids:
|
||||
raise RuntimeError("Last applied migration is not in history.")
|
||||
|
||||
for migration_item in full_history:
|
||||
if last_applied is None:
|
||||
break
|
||||
|
||||
applied_migrations.append(migration_item)
|
||||
|
||||
if last_applied is not None and migration_item.id == last_applied:
|
||||
break
|
||||
|
||||
migrations_to_apply = (
|
||||
full_history[len(applied_migrations) :]
|
||||
if direction == DIRECTION_UP
|
||||
else list(reversed(applied_migrations))
|
||||
)
|
||||
|
||||
collected_sql = []
|
||||
for migration_item in migrations_to_apply:
|
||||
migration = importlib.import_module(migration_item.module)
|
||||
migration_sql = (
|
||||
migration.UP_SQL if direction == DIRECTION_UP else migration.DOWN_SQL
|
||||
)
|
||||
collected_sql.append(migration_sql)
|
||||
print(f"Collected {migration_item.module}: {migration.MESSAGE}")
|
||||
|
||||
with connection, connection.cursor() as cursor:
|
||||
for pos, sql in enumerate(collected_sql):
|
||||
print(f"Applying {migrations_to_apply[pos][1]}")
|
||||
cursor.execute(sql)
|
||||
|
||||
print(migrations_to_apply)
|
||||
next_last_applied = (
|
||||
None if direction == DIRECTION_DOWN else migrations_to_apply[-1].id
|
||||
)
|
||||
|
||||
if next_last_applied is None:
|
||||
cursor.execute("DELETE FROM migrations_lastapplied;")
|
||||
elif last_applied is None:
|
||||
cursor.execute(
|
||||
"INSERT INTO migrations_lastapplied (migration_uid) VALUES (%s);",
|
||||
(next_last_applied,),
|
||||
)
|
||||
else:
|
||||
cursor.execute(
|
||||
"UPDATE migrations_lastapplied SET migration_uid = %s",
|
||||
(next_last_applied,),
|
||||
)
|
||||
|
||||
|
||||
def create_migration_file(label: str, message: typing.Optional[str]):
|
||||
"""
|
||||
Create a new migration file with with a dependency on the last migration
|
||||
in history.
|
||||
"""
|
||||
migration_seq = _get_migration_sequence()
|
||||
|
||||
print("Found migrations:")
|
||||
for migration_id, migration_file in migration_seq:
|
||||
print(f"{migration_id}: {migration_file}")
|
||||
|
||||
parent_uid = migration_seq[-1][0] if migration_seq else None
|
||||
|
||||
migration_uid = str(uuid.uuid4())
|
||||
now = datetime.datetime.now().isoformat()
|
||||
content = f"""\"\"\"
|
||||
Generated: {now}
|
||||
|
||||
Message: {message}
|
||||
\"\"\"
|
||||
UID = "{migration_uid}"
|
||||
|
||||
PARENT = "{parent_uid}"
|
||||
|
||||
MESSAGE = "{message}"
|
||||
|
||||
UP_SQL = \"\"\" \"\"\"
|
||||
|
||||
DOWN_SQL = \"\"\" \"\"\"
|
||||
"""
|
||||
|
||||
migration_filename = f"migration_{len(migration_seq)}_{label}.py"
|
||||
|
||||
with open(migration_filename, "w", encoding="utf8") as migration_file:
|
||||
migration_file.write(content)
|
||||
|
||||
print(f"Created {migration_filename}.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) < 2:
|
||||
raise RuntimeError("Supply up/down as a first argument.")
|
||||
if sys.argv[1] not in VALID_COMMANDS:
|
||||
raise RuntimeError("Invalid commands.")
|
||||
|
||||
arguments = sys.argv[1:]
|
||||
|
||||
COMMAND = arguments[0]
|
||||
MIGRATION_NAME = arguments[1] if len(arguments) >= 2 else None
|
||||
MIGRATION_MESSAGE = arguments[2] if len(arguments) == 3 else None
|
||||
|
||||
if COMMAND == "up":
|
||||
migrate(DIRECTION_UP)
|
||||
elif COMMAND == "down":
|
||||
migrate(DIRECTION_DOWN)
|
||||
elif COMMAND == "new":
|
||||
create_migration_file(MIGRATION_NAME, MIGRATION_MESSAGE)
|
|
@ -1,20 +0,0 @@
|
|||
"""
|
||||
Generated: 2023-08-07T16:14:11.314059
|
||||
|
||||
Message: Files table initial migration
|
||||
"""
|
||||
UID = "06f02980-864d-4832-a894-2e9d2543a79a"
|
||||
|
||||
PARENT = "None"
|
||||
|
||||
MESSAGE = "Files table initial migration"
|
||||
|
||||
UP_SQL = """CREATE TABLE
|
||||
files
|
||||
(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
path text NOT NULL,
|
||||
size bigint NOT NULL
|
||||
);
|
||||
"""
|
||||
DOWN_SQL = """DROP TABLE files;"""
|
|
@ -1,25 +0,0 @@
|
|||
"""
|
||||
Generated: 2023-08-19T23:04:28.163820
|
||||
|
||||
Message: None
|
||||
"""
|
||||
UID = "141faa0b-6868-4d07-a24b-b45f98d2809d"
|
||||
|
||||
PARENT = "06f02980-864d-4832-a894-2e9d2543a79a"
|
||||
|
||||
MESSAGE = "Creates the user table."
|
||||
|
||||
UP_SQL = """CREATE TABLE
|
||||
users
|
||||
(
|
||||
id bigserial PRIMARY KEY,
|
||||
username varchar(64) NOT NULL,
|
||||
password_hash varchar(128) NOT NULL,
|
||||
created_at timestamp DEFAULT now(),
|
||||
updated_at timestamp DEFAULT now(),
|
||||
password_updated_at timestamp DEFAULT now(),
|
||||
CONSTRAINT unique_username UNIQUE(username)
|
||||
)
|
||||
"""
|
||||
|
||||
DOWN_SQL = """DROP TABLE users;"""
|
|
@ -1,27 +0,0 @@
|
|||
"""
|
||||
Generated: 2023-08-27T11:56:17.800102
|
||||
|
||||
Message: Sets up permission-tracking on files
|
||||
"""
|
||||
UID = "3c755dd8-e02d-4a29-b4ee-2afa4d9b30d6"
|
||||
|
||||
PARENT = "141faa0b-6868-4d07-a24b-b45f98d2809d"
|
||||
|
||||
MESSAGE = "Sets up permission-tracking on files"
|
||||
|
||||
UP_SQL = """CREATE TABLE
|
||||
permissions_files
|
||||
(
|
||||
id bigserial PRIMARY KEY,
|
||||
file_id uuid NOT NULL,
|
||||
user_id bigint NOT NULL,
|
||||
value bigint NOT NULL,
|
||||
created_at timestamp DEFAULT now(),
|
||||
updated_at timestamp DEFAULT now(),
|
||||
CONSTRAINT file_fk FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE,
|
||||
CONSTRAINT user_fk FOREIGN KEY(user_id) REFERENCES users(id) ON DELETE CASCADE,
|
||||
CONSTRAINT unique_permission_per_file_per_user UNIQUE(file_id, user_id)
|
||||
);
|
||||
"""
|
||||
|
||||
DOWN_SQL = """DROP TABLE permissions_files;"""
|
|
@ -1,16 +0,0 @@
|
|||
"""
|
||||
Creates the initial files table.
|
||||
"""
|
||||
|
||||
UP_SQL = """CREATE TABLE
|
||||
files
|
||||
(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
path text NOT NULL,
|
||||
size bigint NOT NULL
|
||||
);
|
||||
"""
|
||||
|
||||
DOWN_SQL = """
|
||||
DROP TABLE files
|
||||
"""
|
|
@ -1,23 +0,0 @@
|
|||
import enum
|
||||
|
||||
import typing_extensions as typing
|
||||
|
||||
|
||||
class Permissions(enum.Enum):
|
||||
"""
|
||||
Enumeration of individual permission bits.
|
||||
|
||||
Complex permissions are composed by combining these
|
||||
bits.
|
||||
"""
|
||||
|
||||
CAN_VIEW = 1 << 0
|
||||
CAN_DELETE = 1 << 1
|
||||
|
||||
|
||||
class FilePermission(typing.TypedDict):
|
||||
"""Representation of a permission applicable to a file+user pair"""
|
||||
|
||||
file: str
|
||||
user: int
|
||||
value: typing.List[Permissions]
|
|
@ -1,26 +0,0 @@
|
|||
import typing_extensions as typing
|
||||
|
||||
from permissions.base import Permissions, FilePermission
|
||||
from db import get_connection
|
||||
|
||||
|
||||
def set_file_permission(
|
||||
file_id: str, user_id: int, permissions: typing.List[Permissions]
|
||||
) -> FilePermission:
|
||||
"""
|
||||
Given a file+user pair, creates a permission record with the
|
||||
provided permission list.
|
||||
"""
|
||||
permission_value = sum(permission.value for permission in permissions)
|
||||
|
||||
with get_connection() as connection, connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"INSERT INTO permissions_files (user_id, file_id, value) VALUES (%s, %s, %s) RETURNING id;",
|
||||
(user_id, file_id, permission_value),
|
||||
)
|
||||
inserted_row = cursor.fetchone()
|
||||
|
||||
if inserted_row is None:
|
||||
raise RuntimeError("uh")
|
||||
|
||||
return FilePermission(file=file_id, user=user_id, value=permissions)
|
|
@ -1,60 +0,0 @@
|
|||
# pylint: disable=unused-import, wildcard-import, unused-wildcard-import, too-few-public-methods
|
||||
import os
|
||||
import typing
|
||||
|
||||
IS_CI = os.getenv("ROTINI_CI")
|
||||
IS_TEST = os.getenv("ROTINI_TEST")
|
||||
IS_MIGRATE = os.getenv("ROTINI_MIGRATE")
|
||||
|
||||
|
||||
class Settings:
|
||||
"""
|
||||
Representation of the configuration settings available to the
|
||||
application.
|
||||
"""
|
||||
|
||||
ENV: str
|
||||
|
||||
DATABASE_USERNAME: str
|
||||
DATABASE_PASSWORD: str
|
||||
DATABASE_HOST: str
|
||||
DATABASE_PORT: int
|
||||
DATABASE_NAME: str
|
||||
|
||||
STORAGE_ROOT: typing.Optional[str] = "."
|
||||
|
||||
JWT_SECRET_KEY: str = "placeholder"
|
||||
JWT_LIFETIME: int = 900 # 15 minutes.
|
||||
|
||||
def __init__(self, *_, **kwargs):
|
||||
for key, value in kwargs.items():
|
||||
setattr(self, key, value)
|
||||
|
||||
|
||||
def extract_settings(env: str, imported_module) -> Settings:
|
||||
"""
|
||||
Extracts all the exposed values from the given module and
|
||||
creates a corresponding Settings object.
|
||||
"""
|
||||
imported_values = {
|
||||
k: v for k, v in imported_module.__dict__.items() if not k.startswith("__")
|
||||
}
|
||||
return Settings(ENV=env, **imported_values)
|
||||
|
||||
|
||||
if IS_CI is not None:
|
||||
import envs.ci as ci_config
|
||||
|
||||
settings = extract_settings("ci", ci_config)
|
||||
elif IS_TEST is not None:
|
||||
import envs.test as test_config
|
||||
|
||||
settings = extract_settings("test", test_config)
|
||||
elif IS_MIGRATE is not None:
|
||||
import envs.migrate as migrate_config
|
||||
|
||||
settings = extract_settings("migrate", migrate_config)
|
||||
else:
|
||||
import envs.local as local_config
|
||||
|
||||
settings = extract_settings("local", local_config)
|
@@ -21,5 +21,3 @@ until [ -n "$(docker exec $CONTAINER_NAME pg_isready | grep accepting)" ]; do
    echo "Waiting for DB to come alive..."
    sleep 0.1;
done;

PYTHONPATH=rotini ROTINI_MIGRATE=1 .venv/bin/python rotini/migrations/migrate.py up

@@ -42,5 +42,7 @@ done;

sleep $HEALTHCHECK_SLEEP

ROTINI_TEST=1 PYTHONPATH=rotini $VENV_PYTHON rotini/migrations/migrate.py up || fail "Migrations failed."
ROTINI_TEST=1 $VENV_PYTEST . -vv -s || fail "Test run failed."
#ROTINI_TEST=1 PYTHONPATH=rotini $VENV_PYTHON rotini/migrations/migrate.py up || fail "Migrations failed."
$VENV_PYTEST . -vv -s || fail "Test run failed."

cleanup
|
@ -1,95 +0,0 @@
|
|||
"""
|
||||
Global fixtures
|
||||
|
||||
|
||||
"""
|
||||
from fastapi.testclient import TestClient
|
||||
import httpx
|
||||
import pytest
|
||||
|
||||
from main import app
|
||||
from db import get_connection
|
||||
from settings import settings
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def anyio_backend():
|
||||
return "asyncio"
|
||||
|
||||
|
||||
@pytest.fixture(autouse=False)
|
||||
def reset_database():
|
||||
"""Empties all user tables between tests."""
|
||||
tables = ["files", "users", "permissions_files"]
|
||||
|
||||
with get_connection() as conn, conn.cursor() as cursor:
|
||||
for table in tables:
|
||||
cursor.execute("DELETE FROM " + table + ";")
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
async def set_storage_path(tmp_path, monkeypatch):
|
||||
"""
|
||||
Ensures that files stored by tests are stored
|
||||
in temporary directories.
|
||||
"""
|
||||
|
||||
files_dir = tmp_path / "files"
|
||||
files_dir.mkdir()
|
||||
|
||||
monkeypatch.setattr(settings, "STORAGE_ROOT", str(files_dir))
|
||||
|
||||
|
||||
@pytest.fixture(name="test_user_credentials")
|
||||
def fixture_test_user_creds():
|
||||
"""
|
||||
Test user credentials.
|
||||
"""
|
||||
return {"username": "testuser", "password": "testpassword"}
|
||||
|
||||
|
||||
@pytest.fixture(name="test_user", autouse=True)
|
||||
async def fixture_test_user(client_create_user, test_user_credentials):
|
||||
"""
|
||||
Sets up a test user using the `test_user_credentials` data.
|
||||
"""
|
||||
yield await client_create_user(test_user_credentials)
|
||||
|
||||
|
||||
@pytest.fixture(name="no_auth_client")
|
||||
async def fixture_no_auth_client():
|
||||
"""HTTP client without any authentication"""
|
||||
async with httpx.AsyncClient(app=app, base_url="http://test") as client:
|
||||
yield client
|
||||
|
||||
|
||||
@pytest.fixture(name="jwt_client")
|
||||
async def fixture_jwt_client(client_log_in, test_user_credentials):
|
||||
"""HTTP client with test user authentication via JWT"""
|
||||
response = await client_log_in(test_user_credentials)
|
||||
auth_header = response.headers["authorization"]
|
||||
|
||||
async with httpx.AsyncClient(
|
||||
app=app, base_url="http://test", headers={"Authorization": auth_header}
|
||||
) as client:
|
||||
yield client
|
||||
|
||||
|
||||
@pytest.fixture(name="client_log_in")
|
||||
def fixture_client_log_in(no_auth_client):
|
||||
"""Logs in as the provided user"""
|
||||
|
||||
async def _client_log_in(credentials):
|
||||
return await no_auth_client.post("/auth/sessions/", json=credentials)
|
||||
|
||||
return _client_log_in
|
||||
|
||||
|
||||
@pytest.fixture(name="client_create_user")
|
||||
def fixture_client_create_user(no_auth_client):
|
||||
"""Creates a new user given credentials"""
|
||||
|
||||
async def _client_create_user(credentials):
|
||||
return await no_auth_client.post("/auth/users/", json=credentials)
|
||||
|
||||
return _client_create_user
|
|
@ -1,99 +0,0 @@
|
|||
import pytest
|
||||
|
||||
pytestmark = pytest.mark.anyio
|
||||
|
||||
|
||||
async def test_create_user_returns_201_on_success(client_create_user):
|
||||
credentials = {"username": "newuser", "password": "test"}
|
||||
response = await client_create_user(credentials)
|
||||
|
||||
assert response.status_code == 201
|
||||
|
||||
|
||||
async def test_create_user_with_nonunique_username_fails(client_create_user):
|
||||
credentials = {"username": "newuser", "password": "test"}
|
||||
await client_create_user(credentials)
|
||||
|
||||
# Recreate the same user, name collision.
|
||||
response = await client_create_user(credentials)
|
||||
|
||||
assert response.status_code == 400
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"credentials",
|
||||
[
|
||||
pytest.param({"username": "test"}, id="username_only"),
|
||||
pytest.param({"password": "test"}, id="password_only"),
|
||||
pytest.param({}, id="no_data"),
|
||||
],
|
||||
)
|
||||
async def test_create_user_requires_username_and_password_supplied(
|
||||
client_create_user, credentials
|
||||
):
|
||||
response = await client_create_user(credentials)
|
||||
|
||||
assert response.status_code == 422
|
||||
|
||||
|
||||
async def test_log_in_returns_200_and_user_on_success(
|
||||
client_log_in, test_user_credentials
|
||||
):
|
||||
# The `test_user` fixture creates a user.
|
||||
|
||||
response = await client_log_in(test_user_credentials)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
returned = response.json()
|
||||
|
||||
assert returned["username"] == test_user_credentials["username"]
|
||||
|
||||
|
||||
async def test_log_in_attaches_identity_token_to_response_on_success(
|
||||
client_log_in, test_user_credentials
|
||||
):
|
||||
# This test specifically needs to inspect the JWT, hence the need to access
|
||||
# use case logic that is otherwise an implementation detail.
|
||||
|
||||
import auth.use_cases as auth_use_cases
|
||||
|
||||
response = await client_log_in(test_user_credentials)
|
||||
|
||||
returned_auth = response.headers.get("authorization")
|
||||
token = returned_auth.split(" ")[1] # Header of the form "Bearer <token>"
|
||||
|
||||
assert (
|
||||
auth_use_cases.decode_token(token)["username"]
|
||||
== test_user_credentials["username"]
|
||||
)
|
||||
|
||||
|
||||
async def test_log_in_returns_401_on_wrong_password(
|
||||
client_log_in, test_user_credentials
|
||||
):
|
||||
response = await client_log_in(
|
||||
{"username": test_user_credentials["username"], "password": "sillystring"}
|
||||
)
|
||||
|
||||
assert response.status_code == 401
|
||||
|
||||
|
||||
async def test_log_in_returns_401_on_nonexistent_user(client_log_in):
|
||||
response = await client_log_in({"username": "notauser", "password": "sillystring"})
|
||||
|
||||
assert response.status_code == 401
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"credentials",
|
||||
[
|
||||
pytest.param({"username": "test"}, id="username_only"),
|
||||
pytest.param({"password": "test"}, id="password_only"),
|
||||
pytest.param({}, id="no_data"),
|
||||
],
|
||||
)
|
||||
async def test_log_in_returns_422_on_invalid_input(client_log_in, credentials):
|
||||
response = await client_log_in(credentials)
|
||||
|
||||
assert response.status_code == 422
|
|
@ -1,121 +0,0 @@
|
|||
import pathlib
|
||||
|
||||
import pytest
|
||||
|
||||
pytestmark = pytest.mark.anyio
|
||||
|
||||
|
||||
async def test_list_files_returns_registered_files_and_200(jwt_client, tmp_path):
|
||||
mock_file_1 = tmp_path / "test1.txt"
|
||||
mock_file_1.write_text("testtest")
|
||||
|
||||
with open(str(mock_file_1), "rb") as mock_file_stream:
|
||||
response = await jwt_client.post("/files/", files={"file": mock_file_stream})
|
||||
|
||||
mock_file_1_data = response.json()
|
||||
|
||||
mock_file_2 = tmp_path / "test2.txt"
|
||||
mock_file_2.write_text("testtest")
|
||||
|
||||
with open(str(mock_file_2), "rb") as mock_file_stream:
|
||||
response = await jwt_client.post("/files/", files={"file": mock_file_stream})
|
||||
|
||||
mock_file_2_data = response.json()
|
||||
|
||||
response = await jwt_client.get("/files/")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.json() == [mock_file_1_data, mock_file_2_data]
|
||||
|
||||
|
||||
async def test_file_details_returns_specified_file_and_200(jwt_client, tmp_path):
|
||||
mock_file = tmp_path / "test.txt"
|
||||
mock_file.write_text("testtest")
|
||||
|
||||
with open(str(mock_file), "rb") as mock_file_stream:
|
||||
response = await jwt_client.post("/files/", files={"file": mock_file_stream})
|
||||
|
||||
response_data = response.json()
|
||||
created_file_id = response_data["id"]
|
||||
|
||||
response = await jwt_client.get(f"/files/{created_file_id}/")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.json() == response_data
|
||||
|
||||
|
||||
async def test_file_details_returns_404_if_does_not_exist(jwt_client):
|
||||
non_existent_id = "06f02980-864d-4832-a894-2e9d2543a79a"
|
||||
response = await jwt_client.get(f"/files/{non_existent_id}/")
|
||||
|
||||
assert response.status_code == 404
|
||||
|
||||
|
||||
async def test_file_deletion_returns_404_if_does_not_exist(jwt_client):
|
||||
non_existent_id = "06f02980-864d-4832-a894-2e9d2543a79a"
|
||||
response = await jwt_client.delete(f"/files/{non_existent_id}/")
|
||||
|
||||
assert response.status_code == 404
|
||||
|
||||
|
||||
async def test_file_deletion_deletes_record_and_file(jwt_client, tmp_path):
|
||||
mock_file = tmp_path / "test.txt"
|
||||
mock_file.write_text("testtest")
|
||||
|
||||
with open(str(mock_file), "rb") as mock_file_stream:
|
||||
response = await jwt_client.post("/files/", files={"file": mock_file_stream})
|
||||
|
||||
response_data = response.json()
|
||||
file_id = response_data["id"]
|
||||
file_path = response_data["path"]
|
||||
|
||||
assert pathlib.Path(file_path).exists()
|
||||
response = await jwt_client.get(f"/files/{file_id}/")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
await jwt_client.delete(f"/files/{file_id}/")
|
||||
assert not pathlib.Path(file_path).exists()
|
||||
|
||||
response = await jwt_client.get(f"/files/{file_id}/")
|
||||
|
||||
assert response.status_code == 404
|
||||
|
||||
|
||||
async def test_file_deletion_200_and_return_deleted_resource(jwt_client, tmp_path):
|
||||
mock_file = tmp_path / "test.txt"
|
||||
mock_file.write_text("testtest")
|
||||
|
||||
with open(str(mock_file), "rb") as mock_file_stream:
|
||||
response = await jwt_client.post("/files/", files={"file": mock_file_stream})
|
||||
|
||||
response_data = response.json()
|
||||
file_id = response_data["id"]
|
||||
|
||||
response = await jwt_client.delete(f"/files/{file_id}/")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.json() == response_data
|
||||
|
||||
|
||||
async def test_file_downloads_200_and_return_file(jwt_client, tmp_path):
|
||||
mock_file = tmp_path / "test.txt"
|
||||
mock_file.write_text("testtest")
|
||||
|
||||
with open(str(mock_file), "rb") as mock_file_stream:
|
||||
response = await jwt_client.post("/files/", files={"file": mock_file_stream})
|
||||
|
||||
response_data = response.json()
|
||||
file_id = response_data["id"]
|
||||
|
||||
response = await jwt_client.get(f"/files/{file_id}/content/")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.text == mock_file.read_text()
|
||||
|
||||
|
||||
async def test_file_downloads_404_if_does_not_exist(jwt_client):
|
||||
non_existent_id = "06f02980-864d-4832-a894-2e9d2543a79a"
|
||||
response = await jwt_client.get(f"/files/{non_existent_id}/content/")
|
||||
|
||||
assert response.status_code == 404
|