[TC-221]: init template-backend-service (#1)
https://eva.avroid.tech/desk/cards?obj=Task:TC-221
Initial version of the template service.

Co-authored-by: Nadezhda <nadezhda.lavrentieva@avroid.team>
Reviewed-on: https://git.avroid.tech/Templates/template-backend-service/pulls/1
Reviewed-by: Victor Stratov <victor.stratov@avroid.tech>
45  .gitignore  vendored  Normal file
@@ -0,0 +1,45 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging

.coverage
.coverage.*
.cache
coverage.xml
*.cover
*.py,cover
.pytest_cache/

# Environments
.env
.envrc
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Ruff
.ruff_cache

.vscode
.idea/*
.nvim*
.vim*

# MacOS stuff
.DS_Store

local.env
32  .helm/values.preprod.yaml  Normal file
@@ -0,0 +1,32 @@
replicaCount: 1

extraEnv:
  POSTGRES_USER:
    value: "test"
  POSTGRES_PASSWORD:
    value: "test"
  POSTGRES_HOST:
    value: "cloud-postgres.avroid.cloud"
  POSTGRES_DB:
    value: "messenger"
  POSTGRES_PORT:
    value: "5432"

  SCYLLADB_HOST:
    value: "cloud-scylla.avroid.cloud"
  SCYLLADB_PORT:
    value: "9042"
  SCYLLADB_USER:
    value: "test"
  SCYLLADB_PASSWORD:
    value: "test"
  SCYLLADB_KEYSPACE:
    value: "messenger"

  PORT:
    value: "8000"
  ENVIRONMENT:
    value: "preprod"

service:
  port: 8000
31  .helm/values.prod.yaml  Normal file
@@ -0,0 +1,31 @@
replicaCount: 1

extraEnv:
  POSTGRES_USER:
    value: "test"
  POSTGRES_PASSWORD:
    value: "test"
  POSTGRES_HOST:
    value: "cloud-postgres.avroid.cloud"
  POSTGRES_DB:
    value: "messenger"
  POSTGRES_PORT:
    value: "5432"

  SCYLLADB_HOST:
    value: "cloud-scylla.avroid.cloud"
  SCYLLADB_PORT:
    value: "9042"
  SCYLLADB_USER:
    value: "test"
  SCYLLADB_PASSWORD:
    value: "test"
  SCYLLADB_KEYSPACE:
    value: "messenger"
  PORT:
    value: "8000"
  ENVIRONMENT:
    value: "production"

service:
  port: 8000
31  .helm/values.stage.yaml  Normal file
@@ -0,0 +1,31 @@
replicaCount: 1

extraEnv:
  POSTGRES_USER:
    value: "test"
  POSTGRES_PASSWORD:
    value: "test"
  POSTGRES_HOST:
    value: "cloud-postgres.avroid.cloud"
  POSTGRES_DB:
    value: "messenger"
  POSTGRES_PORT:
    value: "5432"

  SCYLLADB_HOST:
    value: "cloud-scylla.avroid.cloud"
  SCYLLADB_PORT:
    value: "9042"
  SCYLLADB_USER:
    value: "test"
  SCYLLADB_PASSWORD:
    value: "test"
  SCYLLADB_KEYSPACE:
    value: "messenger"
  PORT:
    value: "8000"
  ENVIRONMENT:
    value: "stage"

service:
  port: 8000
25  .pre-commit-config.yaml  Normal file
@@ -0,0 +1,25 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/python-poetry/poetry
    rev: 1.8.2
    hooks:
      - id: poetry-check
  - repo: local
    hooks:
      - id: format
        name: format
        entry: make format
        language: system
        pass_filenames: false
        types: [python]
      - id: lint
        name: lint
        entry: make lint
        language: system
        pass_filenames: false
        types: [python]
2  CHANGELOG.md  Normal file
@@ -0,0 +1,2 @@
# Changelog

15  Dockerfile  Normal file
@@ -0,0 +1,15 @@
FROM python:3.12

WORKDIR /app
EXPOSE 8000
COPY pyproject.toml poetry.lock ./

RUN pip --no-cache-dir install poetry
RUN poetry export --without-hashes -f requirements.txt -o requirements.txt


RUN pip install --no-cache-dir -r requirements.txt

COPY . .

CMD ["python", "-m", "src.api_app"]
31  Makefile  Normal file
@@ -0,0 +1,31 @@
SERVICE_DIR := src
TXT_BOLD := \e[1m
TXT_MAGENTA := \e[35m
TXT_RESET := \e[0m

setup:
	@poetry install --sync

setup-pre-commit:
	@poetry run pre-commit install

lint:
	@printf "${TXT_BOLD}${TXT_MAGENTA}=========================== RUFF ==============================${TXT_RESET}\n"
	@poetry run ruff check --fix --show-fixes --exit-non-zero-on-fix .
	@printf "${TXT_BOLD}${TXT_MAGENTA}=========================== MYPY ==============================${TXT_RESET}\n"
	@poetry run mypy $(SERVICE_DIR)/

format:
	@poetry run ruff format $(SERVICE_DIR)/ tests/

start:
	@poetry run python -m $(SERVICE_DIR).api_app

migration:
	@poetry run alembic revision --autogenerate

migrate:
	@poetry run alembic upgrade head

test:
	@poetry run pytest tests --cov $(SERVICE_DIR) -vv
35  README.md
@@ -1,2 +1,37 @@
# template-backend-service

# Base template for microservices at Avroid


## TODO:
- logging
- dockerization
- Kafka
- sentry
- migrations

# HOW TO

## Set up pre-commit and run the project
### !!! (replace the credentials in local.env with your own)

```bash
make setup
make setup-pre-commit
make start

```

## Run the tests:

Note: the tests run against a local database on your local machine!
Before running, check that the user referenced in `tests.conftest` exists with the expected password (you can set your own)
and has the required privileges!

(And make sure there is nothing you need in the `public` schema, because it gets dropped!)

```bash
make test
```

When running locally, the API documentation is available at http://0.0.0.0:8000/docs
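For reference, a minimal sketch (not part of the commit) of calling the PostgreSQL test endpoint once the service is running locally; the URL path and response shape are taken from `src/routers/v1.py` and `tests/samples.py` below, and `httpx` is already a project dependency.

```python
# Hypothetical usage sketch: query the test endpoint of a locally running
# instance (assumes the service listens on port 8000 as documented above).
import httpx

response = httpx.get("http://0.0.0.0:8000/api/v1/test_psql/1")
response.raise_for_status()
print(response.json())  # e.g. [{"country_id": 1, "iso_3166_code_alpha3": "RU"}]
```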
117  alembic.ini  Normal file
@@ -0,0 +1,117 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = ./migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to ./migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:./migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
40  docker-compose.yml  Normal file
@@ -0,0 +1,40 @@
volumes:
  postgresql: {}

services:
  template-backend-service:
    build: .
    ports:
      - "8000:8000"
    volumes:
      - ./:/app
    environment:
      POSTGRES_DSN: "postgresql://test:test@cloud-postgres.avroid.cloud:5432/messenger"
      POSTGRES_USER: "test"
      POSTGRES_PASSWORD: "test"
      POSTGRES_HOST: "cloud-postgres.avroid.cloud"
      POSTGRES_DB: "messenger"

      PORT: "8000"

      SCYLLADB_HOST: "cloud-scylla.avroid.cloud"
      SCYLLADB_PORT: "9042"
      SCYLLADB_USER: "test"
      SCYLLADB_PASSWORD: "test"
      SCYLLADB_KEYSPACE: "messenger"
      LOGGING: '{"json_enabled": true, "level": "INFO"}'
      ENVIRONMENT: "production"
    depends_on:
      - db
  db:
    image: postgres:14.8
    restart: always
    environment:
      POSTGRES_USER: "test"
      POSTGRES_PASSWORD: "test"
      POSTGRES_HOST: "cloud-postgres.avroid.cloud"
      POSTGRES_DB: "messenger"
    ports:
      - "5432:5432"
    volumes:
      - postgresql:/var/lib/postgresql/data
13  local.env.example  Normal file
@@ -0,0 +1,13 @@
POSTGRES_USER=test
POSTGRES_PASSWORD=test
POSTGRES_HOST=cloud-postgres.avroid.cloud
POSTGRES_DB=messenger
POSTGRES_PORT=5432

SCYLLADB_HOST=cloud-scylla.avroid.cloud
SCYLLADB_PORT=9042
SCYLLADB_USER=test
SCYLLADB_PASSWORD=test
SCYLLADB_KEYSPACE=messenger

PORT=8000
1  migrations/README  Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
74  migrations/env.py  Normal file
@@ -0,0 +1,74 @@
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
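Note that the template ships with `target_metadata = None`, so `alembic revision --autogenerate` (the `make migration` target) will not detect any tables yet. A hypothetical adjustment (not part of this commit) that points autogenerate at the project's metadata could look like this:

```python
# Hypothetical sketch for migrations/env.py: import the table definitions so
# they are registered on pg_metadata, then hand that metadata to Alembic.
from src.database.postgresql import pg_metadata
from src.repositories import tables  # noqa: F401  # registers the Table objects on pg_metadata

target_metadata = pg_metadata
```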
26  migrations/script.py.mako  Normal file
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
1348  poetry.lock  generated  Normal file
File diff suppressed because it is too large
97  pyproject.toml  Normal file
@@ -0,0 +1,97 @@
[tool.poetry]
name = "template-backend-service"
version = "0.1.0"
description = ""
authors = ["Nadezhda Lavrenteva <nadezhda.lavrentieva@avroid.team>"]
readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.12 <3.13"
coverage = "^7.6.1"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.poetry.group.dev.dependencies]
python = ">=3.12 <3.13"
alembic = "^1.13.3"
fastapi = "^0.115.0"
kafka-python = "^2.0.2"
pydantic = "^2.9.2"
sqlalchemy = "^2.0.35"
pydantic-settings = "^2.5.2"
uvicorn = "^0.31.0"
scylla-driver = "^3.26.9"
cassandra-driver = "^3.29.2"
pyyaml = "^6.0.2"
aiopg = {version = "^1.4.0", extras = ["sa"]}
httpx = "^0.27.2"

[tool.poetry.group.format.dependencies]
mypy = "^1.11.2"
ruff = "^0.6.8"
pre-commit = "^3.8.0"

[tool.poetry.group.tests.dependencies]
pytest-asyncio = "^0.24.0"
pytest-cov = "^5.0.0"

[tool.pytest.ini_options]
asyncio_mode = "auto"

[tool.coverage.run]
omit = ["tests/*"]
concurrency = ["greenlet", "thread"]

[tool.mypy]
warn_redundant_casts = true
disallow_any_generics = true
disallow_untyped_calls = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
disallow_untyped_decorators = true
no_implicit_optional = true
warn_unused_ignores = true
warn_return_any = true
strict_equality = true
ignore_missing_imports = true
plugins = ['pydantic.mypy']

[[tool.mypy.overrides]]
module = "tests.*,migrations.*"
ignore_errors = true

[tool.ruff]
line-length = 120

[tool.ruff.lint]

select = ["E", "F", "W", "C90", "I", "N", "UP", "YTT", "ASYNC", "S", "B", "C4", "DTZ", "T10", "ISC", "ICN",
          "PIE", "T20", "PT", "Q", "RET", "SLF", "SIM", "ERA", "PLE", "TRY", "PYI", "PERF", "LOG", "RUF008",
          "RUF009", "RUF012", "RUF013", "RUF015", "RUF019", "RUF100", "RUF200"]
unfixable = [
    "ERA",  # do not autoremove commented out code
]
ignore = [
    "TRY003",
    "ISC001",  # may cause conflicts when used with the formatter
]

[tool.ruff.lint.per-file-ignores]
"tests/*" = ["S", "C4", "ISC", "SLF", "RUF012"]
"src/*" = ["PT"]
"migrations/*" = ["S", "C4", "ISC", "SLF", "RUF012", "ERA001"]


[tool.ruff.lint.pep8-naming]
# Allow Pydantic's `@validator` decorator to trigger class method treatment.
classmethod-decorators = [
    "classmethod",
    "pydantic.validator",
    "pydantic.root_validator",
]

[tool.ruff.lint.flake8-bugbear]
extend-immutable-calls = ["fastapi.Depends", "fastapi.Query", "fastapi.Form", "fastapi.Body"]
0  src/__init__.py  Normal file
21  src/api_app.py  Normal file
@@ -0,0 +1,21 @@
import uvicorn
from fastapi import FastAPI

from src.routers.v1 import api_router
from src.settings import SERVICE_NAME, WebAppSettings


def create_app(settings: WebAppSettings) -> FastAPI:
    api = FastAPI(
        title=SERVICE_NAME,
        settings=settings,
    )
    api.include_router(api_router, prefix="/api")
    return api


if __name__ == "__main__":
    settings_ = WebAppSettings()

    app = create_app(settings_)
    uvicorn.run(app, port=settings_.port)
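The `settings=settings` keyword above is not a standard FastAPI parameter: FastAPI stores unrecognized keyword arguments in `app.extra`, which is how `get_settings` in `src/dependencies/dependencies.py` later retrieves the settings via `request.app.extra["settings"]`. A minimal self-contained illustration (not part of the commit):

```python
# FastAPI keeps unrecognized keyword arguments in `app.extra`;
# the values shown here are illustrative only.
from fastapi import FastAPI

app = FastAPI(title="demo", settings={"port": 8000})
assert app.extra["settings"] == {"port": 8000}
```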
0  src/database/__init__.py  Normal file
17  src/database/postgresql.py  Normal file
@@ -0,0 +1,17 @@
from sqlalchemy import MetaData, create_engine
from sqlalchemy.orm import Session, declarative_base, sessionmaker

from src.settings import WebAppSettings


class PGConnector:
    def __init__(self, settings: WebAppSettings) -> None:
        self.pg_engine = create_engine(
            f"postgresql://{settings.postgres_user}:{settings.postgres_password}@{settings.postgres_host}:"
            f"{settings.postgres_port}/{settings.postgres_db}",
        )
        self.pg_session = sessionmaker(self.pg_engine, class_=Session)


PGBase = declarative_base()
pg_metadata = MetaData()
23  src/database/scylla.py  Normal file
@@ -0,0 +1,23 @@
from cassandra.auth import PlainTextAuthProvider
from cassandra.cluster import Cluster
from cassandra.cqlengine import connection

from src.settings import WebAppSettings


class ScyllaConnector:
    def __init__(self, settings: WebAppSettings) -> None:
        self.auth_provider = PlainTextAuthProvider(
            username=settings.scylladb_user,
            password=settings.scylladb_password,
        )

        self.cluster = Cluster(
            [settings.scylladb_host],
            auth_provider=self.auth_provider,
            port=settings.scylladb_port,
        )

        self.scylladb_session = self.cluster.connect(keyspace=settings.scylladb_keyspace)
        connection.register_connection("main_cluster", session=self.scylladb_session)
        connection.set_default_connection("main_cluster")
0  src/dependencies/__init__.py  Normal file
40  src/dependencies/dependencies.py  Normal file
@@ -0,0 +1,40 @@
from typing import AsyncGenerator, Generator

from cassandra.cluster import Session as ScyllaSession
from fastapi import Depends, Request
from sqlalchemy.orm import Session

from src.database.postgresql import PGConnector
from src.database.scylla import ScyllaConnector
from src.repositories.repository import MessengerHandbookCountryRepositoryPG, TestRepository
from src.settings import WebAppSettings


async def get_settings(request: Request) -> WebAppSettings:
    settings: WebAppSettings = request.app.extra["settings"]
    return settings


async def get_session_pg(settings: WebAppSettings = Depends(get_settings)) -> AsyncGenerator[Session, None]:
    pg_connect = PGConnector(settings)
    db = pg_connect.pg_session()
    try:
        yield db
    finally:
        db.close()


def get_session_scylla(settings: WebAppSettings = Depends(get_settings)) -> Generator[ScyllaSession, None, None]:
    scylla_connect = ScyllaConnector(settings)
    db = scylla_connect.scylladb_session
    yield db


async def get_messenger_handbook_country_repository(
    request: Request, session: Session = Depends(get_session_pg)
) -> MessengerHandbookCountryRepositoryPG:
    return MessengerHandbookCountryRepositoryPG(session)


async def get_test_repository(request: Request, session: Session = Depends(get_session_scylla)) -> TestRepository:
    return TestRepository(session)
0  src/integrations/__init__.py  Normal file
0  src/models/__init__.py  Normal file
20  src/models/base.py  Normal file
@@ -0,0 +1,20 @@
from enum import Enum
from uuid import UUID

from pydantic import BaseModel


class RoleEnum(Enum):
    admin = 1
    manager = 2
    client = 3


class Messenger(BaseModel):
    chat_id: UUID
    title: str | None = None


class MessengerHandbookCountry(BaseModel):
    country_id: int
    iso_3166_code_alpha3: str | None = None
0  src/repositories/__init__.py  Normal file
77  src/repositories/repository.py  Normal file
@@ -0,0 +1,77 @@
import abc
from typing import Any, Sequence, T  # type: ignore

from cassandra.cqlengine.models import Model as ScyllaModel
from sqlalchemy import Select, Table, select
from sqlalchemy.orm import Session

from src.models.base import Messenger, MessengerHandbookCountry
from src.repositories.tables import MessengerTable, messenger_handbook_country_table


class BaseRepositoryPG(abc.ABC):
    @property
    @abc.abstractmethod
    def model_cls(self) -> type[T]:
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def table(self) -> Table:
        raise NotImplementedError

    def __init__(self, session: Session) -> None:
        self._session = session

    async def _get_from_query(self, query: Select[Any]) -> Sequence[T]:
        model_cls = self.model_cls
        with self._session.execute(query) as rows:
            result = rows.fetchall()
        return tuple(model_cls(**row._mapping) for row in result)  # noqa


class BaseRepositoryScylla(abc.ABC):
    @property
    @abc.abstractmethod
    def model_cls(self) -> type[T]:
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def table(self) -> ScyllaModel:
        raise NotImplementedError

    def __init__(self, session: Session) -> None:
        self._session = session

    async def _get_from_query(self, query: Select[Any]) -> tuple[T, ...]:
        model_cls = self.model_cls
        result = self._session.execute(query)
        return tuple(model_cls(**row._mapping) for row in result)  # noqa


class MessengerHandbookCountryRepositoryPG(BaseRepositoryPG):
    table = messenger_handbook_country_table
    model_cls = MessengerHandbookCountry

    async def get_from_query(self, id_: int) -> Sequence[MessengerHandbookCountry]:
        query = (
            (
                select(
                    self.table.c.country_id,
                    self.table.c.iso_3166_code_alpha3,
                )
            )
            .select_from(self.table)
            .where(self.table.c.country_id == id_)
        )
        return await self._get_from_query(query)


class TestRepository(BaseRepositoryScylla):
    table = MessengerTable
    model_cls = Messenger

    async def get_from_query(self) -> tuple[Messenger, ...]:
        result = self.table.objects.all()
        return tuple(self.model_cls(**row._mapping) for row in result)  # noqa
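As a usage note (not part of the commit): adding another PostgreSQL-backed repository only requires pairing a table with a Pydantic model and reusing `_get_from_query`. A hypothetical sketch, with an illustrative class name and lookup column that are not in this commit:

```python
# Hypothetical repository following the same pattern as above;
# the class name and query are illustrative only.
from typing import Sequence

from sqlalchemy import select

from src.models.base import MessengerHandbookCountry
from src.repositories.repository import BaseRepositoryPG
from src.repositories.tables import messenger_handbook_country_table


class CountryByCodeRepositoryPG(BaseRepositoryPG):
    table = messenger_handbook_country_table
    model_cls = MessengerHandbookCountry

    async def get_by_code(self, code: str) -> Sequence[MessengerHandbookCountry]:
        query = (
            select(self.table.c.country_id, self.table.c.iso_3166_code_alpha3)
            .select_from(self.table)
            .where(self.table.c.iso_3166_code_alpha3 == code)
        )
        return await self._get_from_query(query)
```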
23  src/repositories/tables.py  Normal file
@@ -0,0 +1,23 @@
from cassandra.cqlengine.columns import UUID
from cassandra.cqlengine.columns import Integer as CassandraInt
from cassandra.cqlengine.models import Model
from sqlalchemy import Column, Integer, String, Table

from src.database.postgresql import pg_metadata


class MessengerTable(Model):
    __tablename__ = "messenger_common_user"
    __keyspace__ = "messenger"

    user_id = UUID(primary_key=True)
    target_user_id = UUID(primary_key=True)
    via = CassandraInt()


messenger_handbook_country_table = Table(
    "messenger_handbook_country",
    pg_metadata,
    Column("country_id", Integer, primary_key=True),
    Column("iso_3166_code_alpha3", String, nullable=True),
)
0  src/routers/__init__.py  Normal file
30  src/routers/v1.py  Normal file
@@ -0,0 +1,30 @@
from typing import Annotated, Sequence

from fastapi import APIRouter, Depends
from pydantic import PositiveInt
from starlette.requests import Request

from src.dependencies.dependencies import get_messenger_handbook_country_repository, get_test_repository
from src.models.base import Messenger, MessengerHandbookCountry
from src.repositories.repository import MessengerHandbookCountryRepositoryPG, TestRepository

api_router = APIRouter(prefix="/v1", tags=["v1"])


@api_router.get("/test_psql/{test_id}")
async def get_info_from_postgresql(
    request: Request,
    test_repository: Annotated[
        MessengerHandbookCountryRepositoryPG, Depends(get_messenger_handbook_country_repository)
    ],
    test_id: PositiveInt,
) -> Sequence[MessengerHandbookCountry]:
    return await test_repository.get_from_query(id_=test_id)


@api_router.get("/test_scylla")
async def get_info_from_scylla(
    request: Request,
    test_repository: Annotated[TestRepository, Depends(get_test_repository)],
) -> Sequence[Messenger]:
    return await test_repository.get_from_query()
0  src/services/__init__.py  Normal file
24  src/settings.py  Normal file
@@ -0,0 +1,24 @@
from pydantic import PositiveInt
from pydantic_settings import BaseSettings


class WebAppSettings(BaseSettings):
    port: PositiveInt

    postgres_host: str
    postgres_port: int
    postgres_user: str
    postgres_db: str
    postgres_password: str

    scylladb_host: str
    scylladb_port: PositiveInt
    scylladb_user: str
    scylladb_password: str
    scylladb_keyspace: str

    class Config:
        env_file = "local.env"


SERVICE_NAME = "avroid_service_template"
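A minimal sketch (not part of the commit) of how these settings resolve: `pydantic-settings` reads matching environment variables (case-insensitively by default) and falls back to the `local.env` file configured above, so the service can be configured entirely through the environment. The values below are illustrative only.

```python
# Illustrative only: populate the environment, then let WebAppSettings pick it up.
import os

from src.settings import WebAppSettings

os.environ.update({
    "PORT": "8000",
    "POSTGRES_HOST": "localhost",
    "POSTGRES_PORT": "5432",
    "POSTGRES_USER": "test",
    "POSTGRES_DB": "messenger",
    "POSTGRES_PASSWORD": "test",
    "SCYLLADB_HOST": "localhost",
    "SCYLLADB_PORT": "9042",
    "SCYLLADB_USER": "test",
    "SCYLLADB_PASSWORD": "test",
    "SCYLLADB_KEYSPACE": "messenger",
})

settings = WebAppSettings()
print(settings.port, settings.postgres_host)  # 8000 localhost
```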
0  tests/__init__.py  Normal file
98  tests/conftest.py  Normal file
@@ -0,0 +1,98 @@
import warnings
from collections.abc import AsyncGenerator

import pytest
from aiopg.sa import Engine, SAConnection, create_engine
from fastapi import FastAPI
from httpx import AsyncClient
from sqlalchemy import Table
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql.ddl import CreateTable

from src.api_app import create_app
from src.settings import WebAppSettings


@pytest.fixture
def test_settings() -> WebAppSettings:
    return WebAppSettings(
        postgres_user="postgres",
        postgres_password="postgres",
        postgres_host="localhost",
        postgres_db="postgres",
        postgres_port=5432,
        port=8000,
        scylladb_host="localhost",
        scylladb_port="9042",
        scylladb_user="test",
        scylladb_password="test",
        scylladb_keyspace="test",
    )


@pytest.fixture
def test_app(test_settings: WebAppSettings) -> FastAPI:
    return create_app(settings=test_settings)


@pytest.fixture
async def test_client(test_app: FastAPI) -> AsyncGenerator[AsyncClient, None]:
    async with AsyncClient(app=test_app, base_url="http://test/api/v1") as client:
        yield client


@pytest.fixture
def sa_tables():
    """
    A fixture that can be overridden locally to change the set of
    tables to be created.
    """
    warnings.warn(
        "Please, override `sa_tables` fixture if you are using `db_engine`.",
        stacklevel=2,
    )
    return []


@pytest.fixture
def sa_enums():
    """
    A fixture that can be overridden locally to change the set of
    enum types to be created.
    """
    return []


@pytest.fixture(autouse=True)
async def db_engine(test_settings: WebAppSettings, sa_tables, sa_enums) -> AsyncGenerator[Engine, None]:
    postgres_dsn = f"postgresql://{test_settings.postgres_user}:{test_settings.postgres_password}@{test_settings.postgres_host}:5432/{test_settings.postgres_db}"
    async with create_engine(postgres_dsn) as engine:
        async with engine.acquire() as connection:
            await drop_tables(connection)
            await create_enums(connection, sa_enums)
            await create_tables(connection, sa_tables)

        yield engine


@pytest.fixture
async def connection(db_engine: Engine) -> AsyncGenerator[SAConnection, None]:
    async with db_engine.acquire() as connection:
        yield connection


async def drop_tables(connection: SAConnection):
    await connection.execute("DROP SCHEMA public CASCADE;")
    await connection.execute("CREATE SCHEMA public;")


async def create_tables(connection: SAConnection, tables: list[Table]):
    for table in tables:
        ddl = str(CreateTable(table).compile(dialect=postgresql.dialect()))
        await connection.execute(ddl)


async def create_enums(connection, enums):
    for enum in enums:
        ddl = str(postgresql.CreateEnumType(enum).compile(dialect=postgresql.dialect()))
        await connection.execute(ddl)
9  tests/samples.py  Normal file
@@ -0,0 +1,9 @@
ACCOUNT_1 = {
    "country_id": 1,
    "iso_3166_code_alpha3": "RU",
}

ACCOUNT_2 = {
    "country_id": 2,
    "iso_3166_code_alpha3": "EN",
}
23  tests/test_routes.py  Normal file
@@ -0,0 +1,23 @@
import pytest
from aiopg.sa import SAConnection
from httpx import AsyncClient

from src.repositories.tables import messenger_handbook_country_table
from tests.samples import ACCOUNT_1, ACCOUNT_2


@pytest.fixture
def sa_tables():
    return [messenger_handbook_country_table]


@pytest.fixture(autouse=True)
async def _enter_data(connection: SAConnection):
    await connection.execute(messenger_handbook_country_table.insert().values([ACCOUNT_1, ACCOUNT_2]))


async def test_get_info_from_postgresql(test_client: AsyncClient):
    response = await test_client.get("/test_psql/1")
    response_json = response.json()

    assert (response.status_code, response_json) == (200, [ACCOUNT_1])