first commit

This commit is contained in:
Ēriks K 2024-12-26 14:42:24 +02:00
commit 13b66f5771
64 changed files with 3233 additions and 0 deletions

177
.gitignore vendored Normal file
View File

@ -0,0 +1,177 @@
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
.idea/**/aws.xml
.idea/**/contentModel.xml
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
.idea/**/gradle.xml
.idea/**/libraries
cmake-build-*/
.idea/**/mongoSettings.xml
*.iws
out/
.idea_modules/
atlassian-ide-plugin.xml
.idea/replstate.xml
.idea/sonarlint/
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
.idea/httpRequests
.idea/caches/build_file_checksums.ser
*.log
*.pot
*.pyc
__pycache__/
local_settings.py
db.sqlite3
db.sqlite3-journal
media
docs/_book
test/
logs
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
pids
*.pid
*.seed
*.pid.lock
lib-cov
coverage
*.lcov
.nyc_output
.grunt
bower_components
.lock-wscript
build/Release
node_modules/
jspm_packages/
web_modules/
*.tsbuildinfo
.npm
.eslintcache
.stylelintcache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
.node_repl_history
*.tgz
.yarn-integrity
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
.cache
.parcel-cache
.next
out
.nuxt
dist
.cache/
.vuepress/dist
.temp
.docusaurus
.serverless/
.fusebox/
.dynamodb/
.tern-port
.vscode-test
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
.yarn/*
!.yarn/releases
!.yarn/patches
!.yarn/plugins
!.yarn/sdks
!.yarn/versions
!.yarn/cache
dist/
npm-debug.log
yarn-error.log
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
*.manifest
*.spec
pip-log.txt
pip-delete-this-directory.txt
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
*.mo
instance/
.webassets-cache
.scrapy
docs/_build/
.pybuilder/
target/
.ipynb_checkpoints
profile_default/
ipython_config.py
.pdm.toml
.pdm-python
.pdm-build/
__pypackages__/
celerybeat-schedule
celerybeat.pid
*.sage.py
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
.spyderproject
.spyproject
.ropeproject
/site
.mypy_cache/
.dmypy.json
dmypy.json
.pyre/
.pytype/
cython_debug/

8
.idea/.gitignore generated vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

10
.idea/fastbackend_template.iml generated Normal file
View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="jdk" jdkName="Python 3.12 (fastbackend_template)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="Eslint" enabled="true" level="WARNING" enabled_by_default="true" />
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

7
.idea/misc.xml generated Normal file
View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Python 3.12 (fastbackend_template)" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (fastbackend_template)" project-jdk-type="Python SDK" />
</project>

8
.idea/modules.xml generated Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/fastbackend_template.iml" filepath="$PROJECT_DIR$/.idea/fastbackend_template.iml" />
</modules>
</component>
</project>

6
cookiecutter.json Normal file
View File

@ -0,0 +1,6 @@
{
"project_name": "FastAPI Backend",
"project_slug": "{{ cookiecutter.project_name.lower().replace(' ', '_') }}",
"author": "keriks"
}

BIN
cookiecutter.zip Normal file

Binary file not shown.

View File

@ -0,0 +1,11 @@
.editorconfig
.gitattributes
.github
.gitignore
.gitlab-ci.yml
.idea
.pre-commit-config.yaml
.readthedocs.yml
.travis.yml
venv
.git

View File

@ -0,0 +1,21 @@
# http://editorconfig.org
root = true
[*]
max_line_length = 120
indent_style = space
indent_size = 4
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[*.{html,css,scss,json,yml,yaml,xml}]
indent_size = 2
[*.md]
trim_trailing_whitespace = false
[Makefile]
indent_style = tab

View File

@ -0,0 +1,4 @@
[flake8]
extend-ignore = E203, E701, E704, W605, F405
exclude = .git,__pycache__,venv,migrations
max-line-length = 120

View File

@ -0,0 +1,117 @@
*~
.fuse_hidden*
.directory
.Trash-*
.nfs*
*.log
*.pot
*.pyc
__pycache__/
local_settings.py
db.sqlite3
db.sqlite3-journal
media
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
*.stackdump
[Dd]esktop.ini
$RECYCLE.BIN/
*.cab
*.msi
*.msix
*.msm
*.msp
*.lnk
.DS_Store
.AppleDouble
.LSOverride
Icon
._*
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
*.manifest
*.spec
pip-log.txt
pip-delete-this-directory.txt
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
*.mo
instance/
.webassets-cache
.scrapy
docs/_build/
.pybuilder/
target/
.ipynb_checkpoints
profile_default/
ipython_config.py
.pdm.toml
__pypackages__/
celerybeat-schedule
celerybeat.pid
*.sage.py
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
.spyderproject
.spyproject
.ropeproject
/site
.mypy_cache/
.dmypy.json
dmypy.json
.pyre/
.pytype/
cython_debug/
/db.sqlite
/db.sqlite-shm
/db.sqlite-wal
/envs.yml

View File

@ -0,0 +1,22 @@
# Runtime image for the FastAPI service (Alpine, Python 3.12).
# FIX: uppercase AS (Docker lint DL4005-style convention), modern ENV key=value
# form, and `apk add --no-cache` so no cache directory is created in the first
# place (replaces the manual `rm -rf /var/cache/apk/*`).
FROM python:3.12-alpine AS python
WORKDIR /app
ENV PYTHONUNBUFFERED=1
ENV TZ="Europe/Riga"
# Copy requirements first so dependency installation is cached independently
# of application-code changes.
COPY requirements /app/requirements
RUN apk add --no-cache libpq \
        poppler-utils zlib-dev \
        # curl \
    && pip install --no-cache-dir --upgrade -r requirements/production.txt
COPY entrypoint.sh /entrypoint
COPY . /app
# Strip Windows line endings so the entrypoint runs under /bin/sh.
RUN chmod +x /entrypoint \
    && sed -i 's/\r$//g' /entrypoint
# Default listening address - 0.0.0.0:5000
EXPOSE 5000
ENTRYPOINT ["/entrypoint"]
CMD ["uvicorn", "--workers", "2", "--proxy-headers", "--host", "0.0.0.0", "--port", "5000", "--forwarded-allow-ips=*", "service.main:app"]

View File

@ -0,0 +1,63 @@
# `SERVICE_ROOT_PATH`
*Optional*, default value: ``
## Examples
`/api/v2`, ``
# `SERVICE_PROJECT_NAME`
*Optional*, default value: `logger-be`
# `SERVICE_DEBUG`
*Optional*, default value: `False`
# `SERVICE_SECRET_KEY`
*Optional*, default value: `CHANGE_ME--8^&gnoqen9+&9usjpjnsw*lhfqnl45p!^hdvf*s*i--INSECURE`
# `SERVICE_TOKEN_EXPIRATION_DAYS`
*Optional*, default value: `1`
# `SERVICE_CORS_ORIGINS`
*Optional*, default value: `[]`
# `SERVICE_ENVIRONMENT`
*Optional*, default value: `local`
## Examples
`local`, `testing`, `staging`, `production`
# `SERVICE_LOG_LEVEL`
*Optional*, default value: `INFO`
## Examples
`DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`
# `SERVICE_REDIS_URL`
*Optional*, default value: `redis://redis:6379`
# `SERVICE_DATABASE_URL`
*Optional*, default value: `psql://{{ cookiecutter.project_slug }}:{{ cookiecutter.project_slug }}@postgres:5432/{{ cookiecutter.project_slug }}`
# `SERVICE_TIMEZONE`
*Optional*, default value: `Europe/Riga`
## Examples
`UTC`, `Europe/Riga`, `Europe/London`, `US/Pacific`
# `SERVICE_SENTRY_URL`
*Optional*, default value: `None`

View File

@ -0,0 +1,71 @@
.PHONY: clean clean/build clean/pyc help lint lint/flake8 lint/black lint/isort lint/type test run-tasks run-app envs envs/generate-md envs/generate-yaml
.DEFAULT_GOAL := help
define PRINT_HELP_PYSCRIPT
import re, sys
for line in sys.stdin:
match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line)
if match:
target, help = match.groups()
print("%-20s %s" % (target, help))
endef
export PRINT_HELP_PYSCRIPT
help:
@python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
clean: clean/build clean/pyc ## remove all build, test, coverage and Python artifacts
clean/build: ## remove build artifacts
rm -fr build/
rm -fr dist/
rm -fr .eggs/
find . -name '*.egg-info' -exec rm -fr {} +
find . -name '*.egg' -exec rm -f {} +
clean/pyc: ## remove Python file artifacts
find . -name '*.pyc' -exec rm -f {} +
find . -name '*.pyo' -exec rm -f {} +
find . -name '*~' -exec rm -f {} +
find . -name '__pycache__' -exec rm -fr {} +
find . -name '.pytest_cache' -exec rm -fr {} +
lint/flake8: ## check style with flake8
flake8 service
lint/black: ## check style with black
black service
lint/isort: ## check imports with isort
isort service
lint/type: ## check typing with mypy
mypy service
lint: lint/isort lint/black lint/flake8 lint/type ## check style
test: ## run tests quickly with the default Python
pytest -v
docker-build:
docker build --progress plain --tag={{ cookiecutter.project_slug }}:latest .
docker-release:
docker build --tag={{ cookiecutter.project_slug }}:release .
docker-push: docker-build
docker push {{ cookiecutter.project_slug }}:latest
run-app:
exec uvicorn --reload --host 0.0.0.0 --port 8181 --proxy-headers --forwarded-allow-ips=* service.main:app
run-tasks:
exec dramatiq service.tasks --processes=1 --threads=2 --watch config --watch service
envs: envs/generate-md envs/generate-yaml
envs/generate-yaml:
#settings-doc generate --class service.config.ProjectSettings --output-format dotenv > example.env
settings-doc generate --class service.config.ProjectSettings --output-format dotenv --update example.env
envs/generate-md:
#settings-doc generate --class service.config.ProjectSettings --output-format markdown > ENVIRON.md
settings-doc generate --class service.config.ProjectSettings --output-format markdown --update ENVIRON.md

View File

@ -0,0 +1,12 @@
#! /usr/bin/env sh
# Let the DB start
python backend_pre_start.py
# Run migrations
aerich upgrade
# Create initial data in DB
if [ -n "$DEBUG" ]; then
python initial_data.py
fi

View File

@ -0,0 +1,28 @@
services:
app:
build:
context: .
dockerfile: compose/dev/Dockerfile
volumes:
- logs:/app/logs
- files:/app/media
- .:/app
depends_on:
- postgres
- redis
postgres:
image: postgres:16-alpine
restart: always
env_file:
- .env
volumes:
- postgres_data:/var/lib/postgresql/data
redis:
image: redis:alpine
volumes:
logs: { }
files: { }
postgres_data: { }

View File

@ -0,0 +1,19 @@
service:
root_path: "/api/v1"
project_name: {{ cookiecutter.project_name }}
debug: true
secret_key: ChangeME
token_expiration_days: 28
cors_origins:
- "http://localhost:5000"
- "https://example.com"
environment: "local"
log_level: "DEBUG"
redis_url: "redis://redis:6379"
database_url: "psql://{{ cookiecutter.project_slug }}:{{ cookiecutter.project_slug }}@postgres:5432/{{ cookiecutter.project_slug }}"
timezone: "UTC"
sentry_url: null

View File

@ -0,0 +1,30 @@
import time
from typing import Sequence, Type

import orjson
from tortoise import Model, Tortoise, run_async

from service.config import TORTOISE_ORM
from service.database.models import User


async def do_export():
    """Dump every row of the configured models into timestamped JSON fixtures.

    Each exported model is written to fixtures/<timestamp>__<db_table>.json,
    ordered by primary key; empty tables produce no file.
    """
    export_ts = int(time.time())
    # Models to export; extend this tuple when new models need fixtures.
    models: Sequence[Type[Model]] = (User,)
    await Tortoise.init(TORTOISE_ORM)
    for filename, model in ((model._meta.db_table, model) for model in models):
        all_objects = await model.all().order_by("id").values()
        if all_objects:
            # BUG FIX: `filename` (the table name) was never interpolated into
            # the path, so all models overwrote the same fixture file.
            with open(f"fixtures/{export_ts}__{filename}.json", "wb") as f:
                f.write(orjson.dumps(all_objects))


def main():
    """Run the export and report wall-clock duration."""
    start_ts = time.time()
    run_async(do_export())
    end_ts = time.time()
    print(f"Export completed in {end_ts - start_ts:.3f}s")


if __name__ == "__main__":
    main()

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,73 @@
import random
import string
import time
import uuid
from typing import Type

from tortoise import Tortoise, run_async, Model

from service.config import TORTOISE_ORM
from service.database.models import User

# Alphabet used for random identifier generation.
CHARS = string.ascii_letters + string.digits
# Number of User rows to generate per run.
user_count = 205
# NOTE(review): the four counts below are unused in this script — presumably
# leftovers from a template with more models; confirm before removing.
journal_count = 27
form_count = 2
form_input_count = 5
option_count = 8


def rand_str(k: int = 10) -> str:
    """Return a random alphanumeric string of length k."""
    return "".join(random.choices(CHARS, k=k))


async def get_model_next_id(model: Type[Model]) -> int:
    """Return the next free integer primary key for `model` (1 for an empty table)."""
    max_uid = await model.all().order_by("-id").first()
    if max_uid is None:
        max_uid = 0
    else:
        max_uid = max_uid.id
    return max_uid + 1


async def do_import():
    """Generate `user_count` random users and bulk-insert them, printing timings."""
    await Tortoise.init(TORTOISE_ORM)
    # Pre-computed bcrypt hash shared by all generated users, so we do not pay
    # the (deliberately slow) hashing cost once per row.
    _p = "$2b$12$3k.eYVcZxKRbSpRaz/R5luVxI0QI.CRiANGE8LINDGU6El9jYQxgC"
    usernames = [uuid.uuid4().hex for _ in range(user_count)]
    # Assign explicit sequential ids starting after the current maximum.
    u_id = await get_model_next_id(User)
    users = [
        User(
            id=u_id + i,
            username=un,
            email=f"{un}@{{ cookiecutter.project_slug }}.com",
            password=_p,
        )
        for i, un in enumerate(usernames)
    ]
    ts = time.time()
    await User.bulk_create(users)
    print(f"User: {len(users)} rows in {time.time() - ts:.5f}s)")
    uc = len(users)
    print(
        "Data generation finished!\n"
        f"Total of {sum((uc, ))} objects created!\n"
        f"Generated {uc} Users"
    )
    toc = await User.all().count()
    print(
        "Total elements in tables:\n"
        f"Users: {toc}\n"
        f"Total: {toc}"
    )


def main():
    """Run the data generation and report wall-clock duration."""
    start_ts = time.time()
    run_async(do_import())
    end_ts = time.time()
    print(f"Data generated in {end_ts - start_ts:.3f}s")


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,42 @@
import os.path
import time
from typing import Sequence, Type

import orjson
from tortoise import Model, Tortoise, run_async
from tortoise.transactions import atomic

from service.config import TORTOISE_ORM
from service.database.models import User

# Models eligible for fixture import; each reads fixtures/<db_table>.json.
models: Sequence[Type[Model]] = (User, )


@atomic()
async def import_data():
    """Load JSON fixtures named after each model's table and bulk-insert them.

    Missing fixture files are skipped; existing rows are left untouched
    (ignore_conflicts=True). Runs inside a single transaction.
    """
    for filename, model in ((model._meta.db_table, model) for model in models):
        ts = time.time()
        # BUG FIX: the path was an f-string with no placeholder, so the
        # per-model table name bound above was never used.
        _fn = f"fixtures/{filename}.json"
        if not os.path.exists(_fn):
            continue
        # orjson.loads accepts bytes; binary mode skips a text-decode pass.
        with open(_fn, "rb") as f:
            data = orjson.loads(f.read())
        await model.bulk_create((model(**row) for row in data), ignore_conflicts=True)
        te = time.time()
        print(f"{model.__name__} imported ({len(data)} rows in {te - ts:.5f}s)")


async def do_import():
    """Initialise the ORM, then run the fixture import."""
    await Tortoise.init(TORTOISE_ORM)
    await import_data()


def main():
    """Run the import and report wall-clock duration."""
    start_ts = time.time()
    run_async(do_import())
    end_ts = time.time()
    print(f"Import completed in {end_ts - start_ts:.3f}s")


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,26 @@
from tortoise import BaseDBAsyncClient


async def upgrade(db: BaseDBAsyncClient) -> str:
    """Forward migration: create the "user" table and aerich's bookkeeping table."""
    return """
CREATE TABLE IF NOT EXISTS "user" (
    "id" BIGSERIAL NOT NULL PRIMARY KEY,
    "created_at" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
    "modified_at" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
    "email" VARCHAR(255) NOT NULL UNIQUE,
    "username" VARCHAR(32) NOT NULL UNIQUE,
    "password" VARCHAR(256) NOT NULL,
    "is_superuser" BOOL NOT NULL DEFAULT False
);
COMMENT ON COLUMN "user"."is_superuser" IS 'Is user a SuperUser?';
CREATE TABLE IF NOT EXISTS "aerich" (
    "id" SERIAL NOT NULL PRIMARY KEY,
    "version" VARCHAR(255) NOT NULL,
    "app" VARCHAR(100) NOT NULL,
    "content" JSONB NOT NULL
);"""


async def downgrade(db: BaseDBAsyncClient) -> str:
    """Backward migration: returns no SQL (the initial migration is not reverted)."""
    return """
    """

View File

@ -0,0 +1,36 @@
[tool.black]
line-length = 120
target-version = ['py311']
include = '\.pyi?$'
extend-exclude = '''(
migrations/*
| .git/*
| media/*
)'''
workers = 4
[tool.isort]
profile = "black"
line_length = 120
skip = ["migrations", "env", "venv", ".venv", ".git", "media"]
[tool.aerich]
tortoise_orm = "service.config.TORTOISE_ORM"
location = "./migrations"
src_folder = "./."
[tool.mypy]
python_version = "3.11"
exclude = [
'^\.?venv/',
'migrations/'
]
plugins = ["pydantic.mypy"]
warn_unused_configs = true
disallow_untyped_defs = true
implicit_optional = true
warn_redundant_casts = true
warn_no_return = false
ignore_missing_imports = false

View File

@ -0,0 +1,26 @@
fastapi==0.115.5 # https://github.com/tiangolo/fastapi
uvicorn==0.32.1 # https://pypi.org/project/uvicorn/
pydantic[email]==2.10.2 # https://github.com/pydantic/pydantic
pydantic-settings[yaml]==2.6.1 # https://github.com/pydantic/pydantic-settings/
tortoise-orm[accel,asyncpg]==0.22.1 # https://pypi.org/project/tortoise-orm/
aerich==0.7.2 # https://pypi.org/project/aerich/
setech==1.4.2 # https://pypi.org/project/setech/
python-multipart==0.0.18 # https://pypi.org/project/python-multipart/
email-validator==2.2.0 # https://pypi.org/project/email-validator/
tenacity==9.0.0 # https://pypi.org/project/tenacity/
pydantic==2.10.2 # https://pypi.org/project/pydantic/
#emails==0.6 # https://pypi.org/project/emails/
python-jose[cryptography]==3.3 # https://pypi.org/project/python-jose/
passlib[bcrypt]==1.7.4 # https://pypi.org/project/passlib/
bcrypt==4.2.1 # https://pypi.org/project/bcrypt/
# Pin bcrypt until passlib supports the latest
pydantic-settings==2.6.1 # https://pypi.org/project/pydantic-settings/
# asyncio==3.4.3  # removed: asyncio ships with the standard library; the PyPI "asyncio" package is an obsolete Python 3.3-era backport that can shadow the stdlib module

View File

@ -0,0 +1,25 @@
-r base.txt
uvicorn[standard]==0.32.1 # https://pypi.org/project/uvicorn/
black==24.10.0 # https://pypi.org/project/black/
isort==5.13.2 # https://pypi.org/project/isort/
pur==7.3.2 # https://pypi.org/project/pur/
pre-commit==4.0.1 # https://pypi.org/project/pre-commit/
flake8==7.1.1
pytest==8.3.3 # https://pypi.org/project/pytest/
coverage==7.6.8 # https://pypi.org/project/coverage/
mypy==1.13.0 # https://pypi.org/project/mypy/
types-python-jose==3.3.4.20240106 # https://pypi.org/project/types-python-jose/
types-passlib==1.7.7.20240819 # https://pypi.org/project/types-passlib/
types-PyYAML==6.0.12.20240917
types-Pygments==2.18.0.20240506
types-colorama==0.4.15.20240311
types-decorator==5.1.8.20240310
types-six==1.16.21.20241105
types-ujson==5.10.0.20240515
settings-doc==4.3.1 # https://github.com/radeklat/settings-doc
ipython==8.30.0

View File

@ -0,0 +1,3 @@
-r base.txt
sentry-sdk[fastapi]==2.19.0 # https://pypi.org/project/sentry-sdk/

View File

@ -0,0 +1,72 @@
from typing import Annotated

from fastapi import Depends, HTTPException
from fastapi.security import OAuth2PasswordBearer
from jose import JWTError, jwt
from pydantic import ValidationError
from setech.utils import get_logger
from starlette import status

from service.api.models.auth import TokenPayload
from service.config import settings
from service.constants import security
from service.constants.types import PaginationParams
from service.database.models import AnonymousUser, User

__all__ = ["LoggedInUser", "QueryParams", "CurrentRequestUser", "RequestUser"]

_l = get_logger("api")

# auto_error=False: a missing token yields None instead of an immediate 401,
# so optional-auth dependencies (get_request_user) can fall back to anonymous.
reusable_oauth2 = OAuth2PasswordBearer(tokenUrl=f"{settings.root_path}/login/access-token", auto_error=False)
# The raw bearer token from the Authorization header, or None when absent.
TokenDep = Annotated[str | None, Depends(reusable_oauth2)]
# A request's effective principal: a persisted User or the anonymous placeholder.
RequestUser = User | AnonymousUser
async def get_current_user(token: TokenDep) -> User:
    """Resolve the bearer JWT into a persisted User, or raise HTTP 401.

    The token's `sub` claim is treated as the username. Any decode,
    validation, or lookup failure maps to the same 401 so callers cannot
    distinguish "bad token" from "unknown user".
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    if token is None:
        raise credentials_exception
    try:
        payload = jwt.decode(token, settings.secret_key, algorithms=[security.ALGORITHM])
        token_data = TokenPayload(**payload)
    except (JWTError, ValidationError, AttributeError):
        raise credentials_exception
    user = await User.filter(username=token_data.sub).first()
    if user is None:
        raise credentials_exception
    return user


# Dependency alias: inject the authenticated user, rejecting anonymous requests.
LoggedInUser = Annotated[User, Depends(get_current_user)]
async def get_request_user(token: TokenDep) -> RequestUser:
    """Best-effort authentication: resolve the token to a User, or fall back
    to AnonymousUser when the token is missing or invalid (never raises)."""
    if not token:
        return AnonymousUser()
    try:
        return await get_current_user(token)
    except HTTPException:
        return AnonymousUser()


# Dependency alias: inject whichever principal the request carries.
CurrentRequestUser = Annotated[RequestUser, Depends(get_request_user)]
def query_params(q: str | None = None, page: int = 1, limit: int = 10, order: str | None = None) -> PaginationParams:
    """Normalize list-endpoint query parameters into PaginationParams.

    `page` is 1-based for callers and converted into a 0-based row offset;
    `limit` is clamped to the range [1, 250].
    """
    page -= 1
    if page < 0:
        page = 0
    # BUG FIX: the old guard (`limit < 0`) let limit=0 through, producing a
    # query for zero rows; clamp everything below 1 up to the minimum of 1.
    if limit < 1:
        limit = 1
    if limit > 250:
        limit = 250
    _l.info(f"Filtering by: {q=}, {page=}, {limit=} | Ordering by: {order}")
    return PaginationParams(q=q, offset=page * limit, limit=limit, order=order)


QueryParams = Annotated[PaginationParams, Depends(query_params)]

View File

@ -0,0 +1,69 @@
import time
from typing import Any, Callable

from fastapi import FastAPI, Request
from setech.utils import get_logger
from starlette import status
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import RedirectResponse, Response
from tortoise.contrib.fastapi import register_tortoise

from service.api.routes import api_router
from service.config import TORTOISE_ORM, settings
from service.utils.web import do_init

# One-time process initialisation; runs at import, before the app is built.
do_init()
logger = get_logger("api")

app = FastAPI(
    debug=settings.debug,
    root_path=settings.root_path,
    title=settings.project_name,
    version="0.1.0",
    # Server entries shown in the OpenAPI docs UI.
    servers=[
        {"url": "http://localhost:8000/", "description": "Local"},
        {"url": "https://test.{{ cookiecutter.project_slug }}.com/", "description": "Staging environment"},
        {"url": "https://{{ cookiecutter.project_slug }}.com/", "description": "Production environment"},
    ],
)


@app.middleware("http")
async def add_process_time_header(request: Request, call_next: Callable[[Any], Any]) -> Response:
    """Time every request and expose the duration (seconds) via the
    X-Process-Time response header; in debug mode also log request details."""
    start_time = time.time()
    if settings.debug:
        # NOTE: await request.body() buffers the full request body just for
        # this log entry — acceptable in debug mode only.
        logger.info(
            f"Received request for '{request.url}'",
            extra={
                "headers": request.headers,
                "content": await request.body(),
                "cookies": request.cookies,
                "url": request.url,
            },
        )
    response: Response = await call_next(request)
    process_time = time.time() - start_time
    response.headers["X-Process-Time"] = str(process_time)
    return response


# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# overly permissive (browsers reject credentialed wildcard CORS); consider the
# commented settings.cors_origins expression instead — confirm intent.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # [str(origin).strip("/") for origin in settings.cors_origins],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(api_router)


@app.get("/", tags=["Root redirect"])
def root_view() -> RedirectResponse:
    """Redirect the bare root URL to the interactive API docs."""
    return RedirectResponse("/docs", status_code=status.HTTP_308_PERMANENT_REDIRECT)


# Bind the ORM lifecycle to the app and install tortoise exception handlers.
register_tortoise(
    app,
    config=TORTOISE_ORM,
    add_exception_handlers=True,
)

View File

@ -0,0 +1,10 @@
from pydantic import BaseModel


class Token(BaseModel):
    """OAuth2 bearer-token response body."""

    access_token: str
    token_type: str = "bearer"


class TokenPayload(BaseModel):
    """Decoded JWT claims; `sub` carries the username."""

    sub: str | None = None

View File

@ -0,0 +1,21 @@
from typing import Annotated

from pydantic import AfterValidator, BaseModel, EmailStr

from service.core.exceptions import InvalidEmail
from service.utils.validation import validate_email


def email_validation(email: EmailStr) -> str:
    """Validate and normalize an e-mail address for use as a pydantic validator.

    Returns the normalized address; raises ValueError when validation fails,
    which pydantic converts into a standard validation error.
    """
    try:
        validated = validate_email(email)
    except InvalidEmail as e:
        # FIX: raise ValueError instead of AssertionError — ValueError is the
        # conventional failure signal for pydantic validators (AssertionError
        # is associated with `assert`, which is stripped under -O).
        raise ValueError(str(e)) from e
    return validated.normalized


# EmailStr that is additionally normalized after pydantic's own syntax check.
ValidEmail = Annotated[EmailStr, AfterValidator(email_validation)]


class Message(BaseModel):
    """Generic single-message response body."""

    message: str

View File

@ -0,0 +1,8 @@
from fastapi import APIRouter

from .auth import auth_router
from .users import user_router

# Top-level API router: mounts each feature router under its URL prefix.
api_router = APIRouter()
api_router.include_router(auth_router, prefix="/login", tags=["Authorisation"])
api_router.include_router(user_router, prefix="/users", tags=["Users"])

View File

@ -0,0 +1,36 @@
from typing import Annotated

from fastapi import APIRouter, Depends, HTTPException
from fastapi.security import OAuth2PasswordRequestForm
from starlette import status

from service.api.dependencies import LoggedInUser
from service.api.models.auth import Token
from service.core import security
from service.crud.user import UserMe, authenticate

# Router mounted under /login by service.api.routes.
auth_router = router = APIRouter()


@router.post("/access-token")
async def login_access_token(form_data: Annotated[OAuth2PasswordRequestForm, Depends()]) -> Token:
    """
    OAuth2 compatible token login, get an access token for future requests
    """
    user = await authenticate(username=form_data.username, password=form_data.password)
    if not user:
        # Deliberately vague detail: do not reveal whether the username exists.
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Incorrect email and/or password")
    # The JWT subject is the username (see get_current_user's lookup).
    return Token(access_token=security.create_access_token(user.username))


@router.post("/test-token", response_model=UserMe)
async def test_token(current_user: LoggedInUser) -> UserMe:
    """
    Test access token
    """
    return UserMe(
        id=current_user.id,
        email=current_user.email,
        is_superuser=current_user.is_superuser,
        username=current_user.username,
    )

View File

@ -0,0 +1,27 @@
from fastapi import APIRouter, HTTPException
from setech.utils import get_logger

from service.api.dependencies import QueryParams
from service.core.exceptions import InvalidEmail
from service.crud.user import (
    UserMe,
    UserPublic,
    UserRegister,
    get_user_by_email,
    get_user_by_username,
    get_users,
    register_user,
)

# Router mounted under /users by service.api.routes.
user_router = router = APIRouter()
logger = get_logger("api")


@router.get("/list")
async def list_users_view(_params: QueryParams) -> list[UserPublic] | None:
    """Return one page of public user records for the given query params."""
    return await get_users(_params)


@router.post("/register")
async def register_user_view(data: UserRegister) -> UserMe | None:
    """Register a new user, rejecting duplicate e-mails/usernames with HTTP 400."""
    try:
        # BUG FIX: the duplicate check previously looked the e-mail up in the
        # *username* column (get_user_by_username(username=data.email)), so an
        # already-registered e-mail was never detected. Check the email column,
        # and also reject duplicate usernames (the DB column is UNIQUE).
        if await get_user_by_email(email=data.email):
            raise HTTPException(400, "Given email is already registered!")
        if await get_user_by_username(username=data.username):
            raise HTTPException(400, "Given username is already taken!")
    except InvalidEmail as e:
        logger.warning(f"Attempting to register with invalid email: '{data.email}'")
        raise HTTPException(400, str(e))
    logger.info(f"Creating user with email: '{data.email}'")
    user = await register_user(data)
    return UserMe(id=user.id, username=user.username, email=user.email, is_superuser=user.is_superuser)

View File

@ -0,0 +1,90 @@
import os
import pathlib

from service.config._settings import ProjectSettings

__all__ = ["ROOT_DIR", "TORTOISE_ORM", "settings", "LOGGING"]

# Three directory levels above this file (repository root for service/config/__init__.py).
ROOT_DIR = pathlib.Path(__file__).parent.parent.parent

# Single settings instance shared by the whole service.
settings = ProjectSettings()
# Expose the project name to code that reads it from the environment.
os.environ.update({"APP_NAME": settings.project_name})

# Tortoise ORM configuration: one default connection; models plus aerich's
# migration bookkeeping models, with timezone-aware datetimes.
TORTOISE_ORM = {
    "connections": {"default": settings.database_url},
    "apps": {
        "service": {
            "models": ["service.database.models", "aerich.models"],
            "default_connection": "default",
        },
    },
    "use_tz": True,
}

# dictConfig-style logging setup: three console handlers (plain, JSON,
# precise) plus a file handler for DB queries.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "filters": {},
    "root": {"level": settings.log_level},
    "formatters": {
        "structured": {
            "()": "setech.logging.LogJSONFormatter",
        },
        "simple": {
            "format": "[%(asctime)s] %(levelname)-4s: %(message)s",
            "datefmt": "%F %T",
        },
        "precise": {
            # BUG FIX: the format string was missing the closing "]" after
            # lineno, producing unbalanced brackets in every log line.
            "format": "[%(asctime)s][%(levelname)-4s][%(filename)s:%(funcName)s:%(lineno)s]: %(message)s",
            "datefmt": "%F %T %z",
        },
    },
    "handlers": {
        "console": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "simple",
        },
        "console_json": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "structured",
        },
        "console_precise": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "precise",
        },
        "query_file": {
            "level": "INFO",
            "class": "logging.FileHandler",
            # NOTE(review): requires a logs/ directory to exist at startup.
            "filename": "logs/query.log",
            "formatter": "structured",
        },
    },
    "loggers": {
        "tasks": {
            "level": "INFO",
            "handlers": ["console"],
            "propagate": False,
        },
        "service": {
            "level": "INFO",
            "handlers": ["console"],
            "propagate": False,
        },
        "api": {
            "level": "INFO",
            "handlers": ["console_json"],
            "propagate": False,
        },
        "crud.journal": {
            "level": "DEBUG",
            "handlers": ["console_json"],
            "propagate": False,
        },
        "tortoise.db_client": {
            "level": "DEBUG",
            "handlers": ["console_precise", "query_file"],
            "propagate": False,
        },
    },
}

View File

@ -0,0 +1,34 @@
from typing import Annotated

from pydantic import AnyUrl, BeforeValidator, Field, HttpUrl, RedisDsn

from service.constants import LogLevel
from service.constants.system import Environment
from service.helpers.settings import ProjectBaseSettings, parse_cors


class ProjectSettings(ProjectBaseSettings):
    """Environment-driven settings for the whole service."""

    # Fast API Settings
    root_path: str = Field("", examples=["/api/v2", ""])
    project_name: str = Field("{{ cookiecutter.project_slug }}")
    debug: bool = Field(False)
    # Security Settings
    # NOTE(review): the default secret is explicitly marked INSECURE — it must
    # be overridden in any non-local deployment.
    secret_key: str = Field("CHANGE_ME--8^&gnoqen9+&9usjpjnsw*lhfqnl45p!^hdvf*s*i--INSECURE")
    token_expiration_days: int = Field(1)
    # Pre-processed by parse_cors before list[AnyUrl] validation.
    cors_origins: Annotated[list[AnyUrl] | str, BeforeValidator(parse_cors)] = []
    # Service values
    environment: Environment = Field(Environment.local, examples=[_ for _ in Environment])
    log_level: LogLevel = Field(LogLevel.info, examples=[_ for _ in LogLevel])
    # Background task config
    redis_url: RedisDsn = Field(RedisDsn(url="redis://redis:6379"))
    database_url: str = Field("psql://{{ cookiecutter.project_slug }}:{{ cookiecutter.project_slug }}@postgres:5432/{{ cookiecutter.project_slug }}")
    # Various
    timezone: str = Field(
        "Europe/Riga",
        examples=["UTC", "Europe/Riga", "Europe/London", "US/Pacific"],
    )
    # Sentry DSN; None disables error reporting.
    sentry_url: HttpUrl | None = Field(None)

View File

@ -0,0 +1,5 @@
# Re-export the constants that form the package's public surface.
from .system import LogLevel

__all__ = [
    "LogLevel",
]

View File

@ -0,0 +1 @@
# JWT signing algorithm used by service.core.security (HMAC-SHA256).
ALGORITHM = "HS256"

View File

@ -0,0 +1,16 @@
from enum import StrEnum


class LogLevel(StrEnum):
    """Accepted log levels; values match the stdlib logging level names."""

    debug = "DEBUG"
    info = "INFO"
    warning = "WARNING"
    error = "ERROR"
    critical = "CRITICAL"


class Environment(StrEnum):
    """Deployment environment identifiers."""

    local = "local"
    tests = "testing"
    staging = "staging"
    prod = "production"

View File

@ -0,0 +1,13 @@
from dataclasses import dataclass
@dataclass(frozen=True)
class PaginationParams:
q: str | None
offset: int
limit: int
order: str | None
@property
def as_dict(self) -> dict[str, str | int | None]:
return dict(q=self.q, offset=self.offset, limit=self.limit, order=self.order)

View File

@ -0,0 +1,54 @@
import asyncio
import logging

from tenacity import after_log, before_log, retry, stop_after_attempt, wait_fixed
from tortoise import Tortoise

from service.config import TORTOISE_ORM
from service.utils.logging import init_logging

init_logging()
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("service")

max_tries = 2  # 25 * 1 # 5 minutes
wait_seconds = 5


# NOTE(review): init and init_db are duplicates (only the failure logging
# differs); init is kept for backward compatibility — confirm and remove.
@retry(
    stop=stop_after_attempt(max_tries),
    wait=wait_fixed(wait_seconds),
    before=before_log(logger, logging.INFO),
    # FIX: logging.WARN is a deprecated alias of logging.WARNING.
    after=after_log(logger, logging.WARNING),
)
async def init() -> None:
    """Try to initialise the ORM, retrying while the database comes up."""
    try:
        await Tortoise.init(TORTOISE_ORM)
    except Exception as e:
        logger.error(e)
        raise e


@retry(
    stop=stop_after_attempt(max_tries),
    wait=wait_fixed(wait_seconds),
    before=before_log(logger, logging.INFO),
    after=after_log(logger, logging.WARNING),
)
async def init_db() -> None:
    """Try to initialise the ORM, retrying while the database comes up."""
    try:
        await Tortoise.init(TORTOISE_ORM)
    except Exception as e:
        logger.exception(e)
        raise e


def main() -> None:
    """Entry point: block until the database accepts connections."""
    logger.info("Initializing service")
    # FIX: asyncio.get_event_loop() outside a running loop is deprecated;
    # asyncio.run manages the event-loop lifecycle explicitly.
    asyncio.run(init_db())
    logger.info("Service finished initializing")


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,2 @@
class InvalidEmail(Exception):
    """Raised when an e-mail address fails validation."""

View File

@ -0,0 +1,25 @@
from datetime import datetime, timedelta, timezone
from typing import Any

from jose import jwt
from passlib.context import CryptContext

from service.config import settings
from service.constants.security import ALGORITHM

# Single hashing context so the scheme/deprecation policy lives in one place.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")


def create_access_token(subject: str | Any) -> str:
    """Create a signed JWT whose `sub` claim is str(subject).

    The token expires settings.token_expiration_days from now.
    """
    # FIX: datetime.utcnow() returns a naive datetime and is deprecated; use a
    # timezone-aware UTC timestamp so the `exp` claim is unambiguous.
    expire = datetime.now(timezone.utc) + timedelta(days=settings.token_expiration_days)
    to_encode = {"exp": expire, "sub": str(subject)}
    encoded_jwt = jwt.encode(to_encode, settings.secret_key, algorithm=ALGORITHM)
    return encoded_jwt


def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against its stored hash."""
    return pwd_context.verify(plain_password, hashed_password)


def get_password_hash(password: str) -> str:
    """Hash a plaintext password with the configured scheme (bcrypt)."""
    return pwd_context.hash(password)

View File

@ -0,0 +1,17 @@
# Re-export the user CRUD helpers and schemas as the package's public API.
from .methods import authenticate, get_user_by_email, get_user_by_username, get_users, register_user
from .models import UpdatePassword, UserCreate, UserMe, UserPublic, UserRegister, UsersPublic, UserUpdate

__all__ = [
    "authenticate",
    "get_user_by_email",
    "get_user_by_username",
    "get_users",
    "register_user",
    "UsersPublic",
    "UserMe",
    "UserPublic",
    "UpdatePassword",
    "UserUpdate",
    "UserCreate",
    "UserRegister",
]

View File

@ -0,0 +1,39 @@
from service.core.security import verify_password
from service.database.models import User
from ..utils import order_queryset
from ...api.dependencies import QueryParams
from .models import UserPublic, UserRegister
async def authenticate(*, username: str, password: str) -> User | None:
    """Return the user when the username/password pair is valid, else None."""
    user = await get_user_by_username(username=username)
    if user and verify_password(password, user.password):
        return user
    return None
async def get_user_by_email(*, email: str) -> User | None:
    """Fetch the user with exactly this e-mail address, or None when absent."""
    matches = User.filter(email=email)
    return await matches.first()
async def get_user_by_username(*, username: str) -> User | None:
    """Fetch the user with exactly this username, or None when absent."""
    matches = User.filter(username=username)
    return await matches.first()
async def get_users(filters: QueryParams) -> list[UserPublic]:
    """List users as public projections, filtered, ordered and paginated.

    ``filters.q`` does a case-insensitive substring match on the username;
    ordering falls back to newest-modified-first.
    """
    qs = User.all()
    if filters.q:
        qs = qs.filter(username__icontains=filters.q)
    qs = order_queryset(qs, filters, "-modified_at")
    page = await qs.offset(filters.offset).limit(filters.limit)
    return [UserPublic(id=row.id, username=row.username) for row in page]
async def register_user(data: UserRegister) -> User:
    """Persist a new user with a hashed password and return the fresh DB row."""
    new_user = User(email=data.email, username=data.username)
    # set_password stores the hash, never the plaintext.
    new_user.set_password(data.password)
    await new_user.save()
    # Re-read so DB-generated fields (id, timestamps) are populated.
    await new_user.refresh_from_db()
    return new_user

View File

@ -0,0 +1,48 @@
from pydantic import BaseModel
from service.api.models.generic import ValidEmail
__all__ = ["UsersPublic", "UserMe", "UserPublic", "UpdatePassword", "UserUpdate", "UserCreate", "UserRegister"]
class UserBase(BaseModel):
    """Fields shared by the user payload models."""
    email: ValidEmail
    username: str
    is_superuser: bool = False  # regular account unless explicitly elevated
class UserCreate(UserBase):
    """Creation payload: all base fields plus a plaintext password."""
    password: str
class UserRegister(BaseModel):
    """Self-registration payload; does not expose the is_superuser flag."""
    email: ValidEmail
    username: str
    password: str
class UserUpdate(UserBase):
    """Update payload; presumably None means "leave unchanged" — confirm in handler."""
    email: ValidEmail | None = None  # type: ignore
    # NOTE(review): username stays required while email/password are optional — confirm intended.
    username: str
    password: str | None = None
class UpdatePassword(BaseModel):
    """Password-change payload: current password plus the new one entered twice."""
    current_password: str
    new_password: str
    new_password_repeat: str
class UserPublic(BaseModel):
    """Minimal public projection of a user."""
    id: int
    username: str
class UserMe(UserBase):
    """Profile view returned to the authenticated user about themself."""
    id: int
    username: str  # NOTE(review): re-declares UserBase.username with the same type
class UsersPublic(BaseModel):
    """List payload: a page of public users plus a count."""
    data: list[UserPublic]
    count: int  # presumably the total matching rows, not len(data) — confirm against caller

View File

@ -0,0 +1,8 @@
from tortoise.queryset import QuerySet
from service.constants.types import PaginationParams
def order_queryset(qs: QuerySet, filters: PaginationParams, default: str) -> QuerySet:
    """Apply the caller-requested ordering to *qs*, falling back to *default*.

    ``filters.order`` is a comma-separated list of field names, each optionally
    prefixed with ``-`` for descending order. Only names that exist on the
    queryset's model are kept, so a typo'd or malicious ``order`` parameter
    cannot raise inside the ORM.
    """
    requested = []
    for field in filters.order.split(","):
        # removeprefix strips only the leading DESC marker; the original
        # split("-")[-1] would mangle any field name containing a hyphen.
        if field.removeprefix("-") in qs.fields:
            requested.append(field)
    return qs.order_by(*(requested or (default,)))

View File

@ -0,0 +1,42 @@
from datetime import datetime
from typing import Literal
from tortoise import fields
from tortoise.models import Model
from tortoise.validators import MinLengthValidator
from service.core.security import get_password_hash
from service.database.validators import EmailValidator
__all__ = [
"User",
"AnonymousUser",
]
class TimestampMixin(Model):
    """Abstract model base adding a BigInt primary key and audit timestamps."""
    id: int = fields.BigIntField(pk=True)
    created_at: datetime = fields.DatetimeField(null=True, auto_now_add=True)  # set once on insert
    modified_at: datetime = fields.DatetimeField(null=True, auto_now=True)  # refreshed on every save
    class Meta:
        # Mixin only — no table is created for this model itself.
        abstract = True
class User(TimestampMixin, Model):
    """Application account with unique e-mail/username and a hashed password."""
    email = fields.CharField(max_length=255, validators=[MinLengthValidator(5), EmailValidator(False)], unique=True)
    username = fields.CharField(max_length=32, unique=True)
    # Holds the hash produced by get_password_hash (bcrypt context) — never plaintext.
    password = fields.CharField(max_length=256)
    is_superuser = fields.BooleanField(default=False, description="Is user a SuperUser?")
    # Helper for auto-complete and typing
    # model_set: fields.ReverseRelation["DatabaseModel"]
    def set_password(self, new_password: str) -> None:
        """Hash *new_password* and assign it; the caller must still save()."""
        self.password = get_password_hash(new_password)
class AnonymousUser:
    """Sentinel standing in for an unauthenticated user.

    Mirrors the identifying attributes of ``User`` with empty/zero values,
    pinned via ``Literal`` so type-checkers treat them as constants.
    """

    id: Literal[0] = 0
    email: Literal[""] = ""
    username: Literal[""] = ""

View File

@ -0,0 +1,16 @@
from tortoise.exceptions import ValidationError
from tortoise.validators import Validator
from service.core.exceptions import InvalidEmail
from service.utils.validation import validate_email
class EmailValidator(Validator):
    """Tortoise field validator delegating to ``service.utils.validation.validate_email``."""

    def __init__(self, use_dns: bool = False):
        # When truthy, validation also checks the domain's deliverability.
        self.use_dns = use_dns

    def __call__(self, value: str) -> None:
        """Raise tortoise ``ValidationError`` when *value* is not a valid e-mail."""
        try:
            validate_email(value, self.use_dns)
        except InvalidEmail as e:
            # Chain the original error so the root cause stays in the traceback.
            raise ValidationError(str(e)) from e

View File

@ -0,0 +1,46 @@
from pathlib import Path
from typing import Any, Tuple, Type
import yaml
from pydantic_settings import BaseSettings, InitSettingsSource, PydanticBaseSettingsSource, SettingsConfigDict
from pydantic_settings.sources import ConfigFileSourceMixin, EnvSettingsSource
def parse_cors(v: Any) -> list[str] | str:
if isinstance(v, str) and not v.startswith("["):
return [i.strip() for i in v.split(",")]
elif isinstance(v, list | str):
return v
raise ValueError(v)
class ProjectBaseSettings(BaseSettings):
    """Settings base that layers a YAML file ("envs.yml") under environment variables."""

    model_config = SettingsConfigDict(yaml_file="envs.yml", yaml_file_encoding="utf-8", env_prefix="service")

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: Type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: EnvSettingsSource,  # type: ignore
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> Tuple[PydanticBaseSettingsSource, ...]:
        """Install the prefixed-YAML source between init kwargs and env vars.

        Sources returned earlier take precedence (pydantic-settings convention).
        NOTE(review): dotenv_settings is dropped from the returned tuple — confirm
        that .env support is intentionally disabled.
        """
        return (
            init_settings,
            # The env_prefix ("service") doubles as the YAML section name.
            PrefixedYamlConfigSettingsSource(env_settings.env_prefix, settings_cls),
            env_settings,
            file_secret_settings,
        )
class PrefixedYamlConfigSettingsSource(InitSettingsSource, ConfigFileSourceMixin):
    """Settings source exposing only the *prefix* top-level section of "envs.yml"."""

    def __init__(self, prefix: str, settings_cls: type[BaseSettings]):
        self.yaml_file_path = "envs.yml"
        self.yaml_file_encoding = "utf-8"
        # A KeyError here means the YAML file has no section for this prefix.
        self.yaml_data = self._read_files(self.yaml_file_path)[prefix]
        super().__init__(settings_cls, self.yaml_data)

    def _read_file(self, file_path: Path) -> dict[str, dict[str, Any]]:
        """Parse one YAML file; invoked by ConfigFileSourceMixin._read_files."""
        with open(file_path, encoding=self.yaml_file_encoding) as yaml_file:
            return yaml.safe_load(yaml_file)

View File

@ -0,0 +1,33 @@
import logging.config
from pathlib import Path
from setech.utils import get_logger
from service.config import settings
def init_logging() -> None:
    """Create the on-disk log directory and install the project logging config."""
    Path("logs").mkdir(parents=True, exist_ok=True)

    # Imported lazily, matching the original, to avoid import-time side effects.
    from service.config import LOGGING

    logging.config.dictConfig(LOGGING)
def initialize_sentry() -> None:
    """Best-effort Sentry setup: a missing SDK or bad config only logs, never crashes startup."""
    if not settings.sentry_url:
        return
    try:
        # Imported lazily so the sentry-sdk dependency stays optional.
        import sentry_sdk  # type: ignore
        from sentry_sdk.integrations.fastapi import FastApiIntegration  # type: ignore
        from sentry_sdk.integrations.starlette import StarletteIntegration  # type: ignore

        sentry_sdk.init(
            dsn=str(settings.sentry_url),
            enable_tracing=True,
            environment=settings.environment,
            integrations=[
                StarletteIntegration(transaction_style="endpoint"),
                FastApiIntegration(transaction_style="endpoint"),
            ],
        )
    # ModuleNotFoundError/ImportError are Exception subclasses — listing them
    # separately (as the original did) was redundant.
    except Exception as e:  # noqa
        get_logger().exception("Unable to set up Sentry integration", exc_info=e)

View File

@ -0,0 +1,20 @@
import datetime
import zoneinfo
def time_now() -> datetime.datetime:
    """Return the current moment as a timezone-aware UTC datetime."""
    utc = zoneinfo.ZoneInfo("UTC")
    return datetime.datetime.now(utc)
def time_utc_now() -> datetime.datetime:
return _time_in_timezone()
def _time_in_timezone(
dt: datetime.datetime | None = None, zone_info: str | zoneinfo.ZoneInfo = "UTC"
) -> datetime.datetime:
if dt is None:
dt = datetime.datetime.now(zoneinfo.ZoneInfo("UTC"))
if isinstance(zone_info, str):
zone_info = zoneinfo.ZoneInfo(zone_info)
return dt.astimezone(zone_info)

View File

@ -0,0 +1,16 @@
import email_validator
from email_validator import EmailNotValidError, ValidatedEmail
from service.core.exceptions import InvalidEmail
def validate_email(value: str, use_dns: bool | None = None) -> ValidatedEmail:
    """Validate *value* as an e-mail address.

    Args:
        value: the candidate address.
        use_dns: when truthy, also check the domain's deliverability via DNS.
            (Annotation fixed: the original declared ``bool = None``.)

    Raises:
        InvalidEmail: wrapping the library's EmailNotValidError, chained for
            traceback clarity.
    """
    try:
        return email_validator.validate_email(
            value,
            check_deliverability=use_dns,
            allow_domain_literal=False,
        )
    except EmailNotValidError as e:
        raise InvalidEmail(str(e)) from e

View File

@ -0,0 +1,6 @@
from service.utils.logging import init_logging, initialize_sentry
def do_init() -> None:
    """One-time service bootstrap: configure logging first, then Sentry.

    Order matters — initialize_sentry logs its own failures, so logging must
    already be set up when it runs.
    """
    init_logging()
    initialize_sentry()