22  .dockerignore  Normal file
@@ -0,0 +1,22 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info

# Virtual environments
.venv

# uv
uv.lock

# Ruff
.ruff_cache

# env
*.env

# Container
container
75  .gitea/workflows/latest.yaml  Normal file
@@ -0,0 +1,75 @@
name: Build And Push
run-name: ${{ github.actor }} builds and pushes production-ready image
on:
  push:
    branches:
      - latest

jobs:
  publish:
    runs-on: ubuntu-latest
    env:
      RUNNER_TOOL_CACHE: /${{ github.workspace }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          registry: git.miwory.dev
          username: ${{ secrets.CI_USERNAME }}
          password: ${{ secrets.CI_TOKEN }}

      - name: Cache uv binary
        uses: actions/cache@v4
        with:
          path: ${{ github.workspace }}/uv
          key: uv-${{ runner.os }}
          restore-keys: uv-${{ runner.os }}

      - name: Cache uv dependencies
        uses: actions/cache@v4
        with:
          path: ${{ github.workspace }}/.cache/uv
          key: uv-${{ runner.os }}
          restore-keys: uv-${{ runner.os }}

      - name: Cache pre-commit
        uses: actions/cache@v4
        with:
          path: ~/.cache/pre-commit
          key: pre-commit-cache-${{ runner.os }}-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: pre-commit-cache-${{ runner.os }}-

      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          version: "0.7.8"
          enable-cache: true
          cache-local-path: ${{ github.workspace }}/.cache/uv
          tool-dir: ${{ github.workspace }}/.cache/uv
          tool-bin-dir: ${{ github.workspace }}/.cache/uv
          cache-dependency-glob: ""

      - name: Set up Python
        run: uv python install

      - name: Install the project
        run: uv sync --no-install-project --cache-dir ${{ github.workspace }}/.cache/uv

      - name: Linter & Formatter
        run: uv run pre-commit run --all-files

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          target: production
          push: true
          tags: "git.miwory.dev/SmartSolutions/HospitalAssistantBackend:latest"
          cache-from: type=gha
          cache-to: type=gha,mode=max
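
The workflow above can be rehearsed locally before pushing to the latest branch. The commands below are a hedged sketch of its lint and build steps, not part of the commit; they assume uv and a Docker daemon are available, and the image tag is copied verbatim from the workflow:

    uv sync --no-install-project
    uv run pre-commit run --all-files
    docker build --target production -t git.miwory.dev/SmartSolutions/HospitalAssistantBackend:latest .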
22  .gitignore  vendored  Normal file
@@ -0,0 +1,22 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info

# Virtual environments
.venv

# uv
uv.lock

# Ruff
.ruff_cache

# env
*.env

# Container
container
34  .pre-commit-config.yaml  Normal file
@@ -0,0 +1,34 @@
repos:
  - repo: https://github.com/crate-ci/typos
    rev: v1.31.1
    hooks:
      - id: typos

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.2
    hooks:
      - id: ruff
        args: [ --fix ]
      - id: ruff-format

  - repo: https://github.com/RobertCraigie/pyright-python
    rev: v1.1.398
    hooks:
      - id: pyright

  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: trailing-whitespace
      - id: check-docstring-first
      - id: check-added-large-files
      - id: check-yaml
      - id: debug-statements
      - id: check-merge-conflict
      - id: double-quote-string-fixer
      - id: end-of-file-fixer

  - repo: meta
    hooks:
      - id: check-hooks-apply
      - id: check-useless-excludes
82  Dockerfile  Normal file
@@ -0,0 +1,82 @@
#################################################
FROM debian:bookworm-slim AS builder-base

RUN apt-get update && \
    apt-get install --no-install-recommends -y \
    libpq-dev \
    ca-certificates \
    libc6 \
    libstdc++6 \
    sudo \
    && groupadd --gid 1001 appuser \
    && useradd --uid 1001 --gid appuser --shell /bin/bash --create-home appuser

ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    UV_VERSION="0.7.6" \
    UV_PYTHON="3.13.3" \
    UV_PYTHON_INSTALL_DIR="/app/.python" \
    UV_PYTHON_PREFERENCE="only-managed" \
    UV_COMPILE_BYTECODE=1 \
    UV_NO_INSTALLER_METADATA=1 \
    UV_LINK_MODE=copy \
    PATH="$PATH:/root/.local/bin/:/app/.venv/bin:/opt/cprocsp/bin/amd64:/opt/cprocsp/sbin/amd64"

# Install CryptoPro CSP 5
WORKDIR /tmp/cryptopro
COPY packages/linux-amd64_deb.tgz /tmp/cryptopro/
RUN tar -xzf linux-amd64_deb.tgz && \
    cd linux-amd64_deb && \
    ./install.sh && \
    dpkg -i cprocsp-cptools-*.deb lsb-cprocsp-base_*.deb lsb-cprocsp-kc1_*.deb lsb-cprocsp-capilite_*.deb || apt-get install -f -y && \
    # Create symbolic links for CryptoPro tools
    ln -s /opt/cprocsp/bin/amd64/certmgr /bin/certmgr && \
    ln -s /opt/cprocsp/bin/amd64/cpverify /bin/cpverify && \
    ln -s /opt/cprocsp/bin/amd64/cryptcp /bin/cryptcp && \
    ln -s /opt/cprocsp/bin/amd64/csptest /bin/csptest && \
    ln -s /opt/cprocsp/sbin/amd64/cpconfig /bin/cpconfig && \
    # Set permissions for CryptoPro directories
    mkdir -p /etc/opt/cprocsp /var/opt/cprocsp && \
    chown -R appuser:appuser /etc/opt/cprocsp /var/opt/cprocsp && \
    # Clean up
    rm -rf /tmp/cryptopro && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

#################################################
FROM builder-base AS python-base

WORKDIR /app

RUN apt-get update && \
    apt-get install --no-install-recommends -y \
    curl \
    clang \
    && curl -LsSf https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/uv-installer.sh | sh && \
    uv python install && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

COPY pyproject.toml ./

RUN uv sync --no-dev -n
RUN uv version --short > .version

#################################################
FROM builder-base AS production

WORKDIR /app

RUN chown -R appuser:appuser /app

COPY --from=python-base /app/.python /app/.python
COPY --from=python-base /app/.venv /app/.venv
COPY --from=python-base /app/.version /app/
COPY /src/ /app/
COPY /scripts/ /app/scripts
RUN chmod -R 755 /app/scripts

USER appuser

CMD ["sh", "./scripts/boot.sh"]
#################################################
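
A minimal sketch of building and running the production stage of this Dockerfile by hand. The image name is a placeholder, the packages/linux-amd64_deb.tgz CryptoPro archive must be present in the build context, and the .env file is assumed to carry the CRYPTOPRO_* and ESIA_* variables the container expects:

    docker build --target production -t hospital-assistant-backend .
    docker run --rm -p 6767:6767 --env-file .env hospital-assistant-backend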
34  docker-compose.yml  Normal file
@@ -0,0 +1,34 @@
name: HospitalAssistantAPI

x-app-common: &app-common
  build:
    context: .
    target: production
  tty: true
  restart: unless-stopped
  stop_signal: SIGINT
  env_file:
    - .test.env
  environment:
    DEBUG: false
    DATABASE_URL: "postgresql://postgres:example@db:5432/postgres"
    REDIS_URL: "redis://valkey:6379/0"
  volumes:
    - "./container:/var/opt/cprocsp/keys"

services:
  valkey:
    image: valkey/valkey:alpine
    restart: unless-stopped
    ports:
      - ${VALKEY_PORT:-6380}:6379
    healthcheck:
      test: [ "CMD", "redis-cli", "ping" ]
      interval: 5s
      timeout: 10s
      retries: 5

  web:
    <<: *app-common
    ports:
      - "${APP_PORT:-6767}:${APP_PORT:-6767}"
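
For reference, a hedged sketch of bringing this stack up; it assumes a .test.env file exists in the repository root, since the app-common anchor lists it under env_file:

    docker compose up --build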
BIN  packages/linux-amd64_deb.tgz  Normal file
Binary file not shown.
121  pyproject.toml  Normal file
@@ -0,0 +1,121 @@
[project]
name = "HospitalAssistantBackend"
version = "1.0.0"
description = "Backend for Hospital Assistant"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    # Server
    "fastapi==0.116.1",
    "gunicorn==23.0.0",
    "orjson==3.11.3",
    "redis[hiredis]==6.4.0",
    "uvicorn-worker==0.3.0",
    "uvicorn[standard]==0.35.0",
    # Logging
    "python-logging-loki==0.3.1",
    # Requests
    "httpx==0.28.1",
    # Database
    "alembic==1.16.4",
    "psycopg==3.2.9",
    "psycopg-c==3.2.9; sys_platform != 'win32'",
    "asyncpg==0.30.0",
    "sqlmodel==0.0.24",
    # Types
    "pydantic==2.11.7",
    "pydantic-settings==2.10.1",
    "pydantic-extra-types==2.10.5",
    "semver==3.0.4",
    "pyjwt==2.10.1",
    # CLI
    "typer-slim==0.16.1",
]

[dependency-groups]
dev = [
    "celery-types==0.23.0",
    "poethepoet==0.34.0",
    "pre-commit==4.2.0",
    "psycopg[binary]==3.2.9",
    "pyright==1.1.401",
    "ruff==0.11.12",
    "types-pyjwt==1.7.1",
    "types-redis==4.6.0.20241004",
]

[tool.poe.tasks]
_git = "git add ."
_lint = "pre-commit run --all-files"

lint = ["_git", "_lint"]
check = "uv pip ls --outdated"
run = "uv run --directory ./src/ server.py"
manage = "uv run --directory ./src/ manage.py"
migrate = "uv run --directory ./src/ alembic revision --autogenerate"

[tool.uv]
required-version = ">=0.7.0"
dependency-metadata = [
    { name = "psycopg-c", version = "3.2.9", requires-python = ">=3.8", requires-dist = [
        "psycopg==3.2.9",
    ] },
]
[tool.typos.files]
extend-exclude = ["**/migrations/versions"]

[tool.pyright]
venvPath = "."
venv = ".venv"
exclude = ["**/migrations/versions"]
strictListInference = true
strictDictionaryInference = true
strictSetInference = true
deprecateTypingAliases = true
typeCheckingMode = "strict"
pythonPlatform = "All"

[tool.ruff]
target-version = "py313"
line-length = 79
fix = true

[tool.ruff.lint]
preview = true
select = [
    "E",
    "W",
    "F",
    "UP",
    "A",
    "B",
    "C4",
    "SIM",
    "I",
    "S",
    "G",
    "FAST",
    "ASYNC",
    "BLE",
    "INT",
    "ISC",
    "ICN",
    "PYI",
    "INP",
    "RSE",
    "PIE",
    "SLOT",
    "TID",
    "LOG",
    "FBT",
    "DTZ",
    "EM",
    "PERF",
    "RUF",
]
ignore = ["RUF029", "S104", "RUF001"]

[tool.ruff.format]
quote-style = "single"
indent-style = "space"
docstring-code-format = true
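
The [tool.poe.tasks] table above maps onto poethepoet commands. A sketch of how they would typically be invoked through uv, assuming the dev dependency group has been installed:

    uv sync                 # install runtime and dev dependencies
    uv run poe lint         # stage changes and run pre-commit on all files
    uv run poe run          # start server.py from ./src/
    uv run poe migrate      # autogenerate an Alembic revision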
9  scripts/boot.sh  Normal file
@@ -0,0 +1,9 @@
#!/bin/bash
set -e

cpconfig -license -set "$CRYPTOPRO_LICENSE"
# certmgr -inst -file /var/opt/cprocsp/keys/cert.cer -cont "$CRYPTOPRO_CONTAINER"
certmgr -inst -file /var/opt/cprocsp/keys/cert.p7b -cont "$CRYPTOPRO_CONTAINER"

# python -m alembic upgrade head
python server.py
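
boot.sh reads the CryptoPro license and key-container name from the environment. A hedged example of running it by hand, with placeholders standing in for real secrets:

    export CRYPTOPRO_LICENSE="<license-key>"          # placeholder value
    export CRYPTOPRO_CONTAINER="<container-name>"     # placeholder value
    sh ./scripts/boot.sh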
6  src/alembic.ini  Normal file
@@ -0,0 +1,6 @@
[alembic]
file_template = %%(year)d.%%(month).2d.%%(day).2d_%%(hour).2d-%%(minute).2d-%%(second).2d_%%(rev)s
script_location = migrations
prepend_sys_path = .
version_path_separator = os
output_encoding = utf-8
0  src/apps/esia/__init__.py  Normal file
10  src/apps/esia/scopes.py  Normal file
@@ -0,0 +1,10 @@
SCOPES = [
    'openid',
    'fullname',
    # 'email',
    # 'birthdate',
    # 'gender',
    # 'snils',
    # 'id_doc',
    # 'mobile',
]
82  src/apps/esia/sign.py  Normal file
@@ -0,0 +1,82 @@
import base64
import secrets
import subprocess  # noqa: S404
import tempfile
import uuid
from datetime import UTC, datetime
from pathlib import Path
from typing import Any
from urllib.parse import urlencode

from apps.esia.scopes import SCOPES
from core.config import settings

ACCESS_TYPE = 'online'
RESPONSE_CODE = 'code'


def csp_sign(data: str):
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_file_name = secrets.token_hex(8)
        source_path = Path(tmp_dir) / f'{tmp_file_name}.txt'
        destination_path = source_path.with_suffix('.txt.sgn')

        with open(source_path, 'w', encoding='utf-8') as f:
            f.write(data)

        print(data)

        cmd = [
            'cryptcp',
            '-signf',
            '-norev',
            '-nochain',
            '-der',
            '-strict',
            '-cert',
            '-detached',
            '-thumbprint',
            settings.ESIA_CONTAINER_THUMBPRINT,
            '-pin',
            settings.ESIA_CONTAINER_PASSWORD,
            '-dir',
            tmp_dir,
            str(source_path),
        ]

        subprocess.run(  # noqa: S603
            cmd, input=b'y\n', capture_output=True, check=True, text=False
        )
        signed_message = destination_path.read_bytes()

    return signed_message


def sign_params(params: dict[str, Any]):
    plaintext = (
        params.get('scope', '')
        + params.get('timestamp', '')
        + params.get('client_id', '')
        + params.get('state', '')
    )

    client_secret = csp_sign(plaintext)
    return base64.urlsafe_b64encode(client_secret).decode('utf-8')


def get_url():
    timestamp = datetime.now(UTC).strftime('%Y.%m.%d %H:%M:%S %z').strip()
    state = str(uuid.uuid4())
    params = {
        'client_id': settings.ESIA_CLIENT_ID,
        'client_secret': '',
        'redirect_uri': settings.ESIA_REDIRECT_URI,
        'response_type': RESPONSE_CODE,
        'state': state,
        'timestamp': timestamp,
        'access_type': ACCESS_TYPE,
        'scope': ' '.join(SCOPES),
    }
    params['client_secret'] = sign_params(params)

    return f'{settings.ESIA_BASE_URL}/aas/oauth2/ac?{urlencode(params)}'
0  src/apps/esia/v1/__init__.py  Normal file
50  src/apps/esia/v1/router.py  Normal file
@@ -0,0 +1,50 @@
import secrets
from logging import getLogger

from fastapi import APIRouter

from apps.esia.sign import get_url
from clients import clients as c
from shared import exceptions as e
from shared.redis import client as cache

from . import schema as s

logger = getLogger(__name__)
router = APIRouter(
    prefix='/esia',
    tags=[
        'ESIA',
    ],
)


@router.get('/login', response_model=s.LoginURL)
async def login():
    url = get_url()
    return s.LoginURL(url=url)


@router.post('/callback')
async def callback(code: str):
    token = None
    for i in range(3):
        try:
            token = await c.esia_api.access_token(code)
            break
        except Exception:
            logger.warning(
                'Error occurred while accessing ESIA API. Retrying...'
            )
            if i == 2:
                raise

    if token is None:
        raise e.BadRequestException

    await c.esia_api.get_user_info(token.access_token, token.id_token)

    access_token = secrets.token_urlsafe(32)
    cache.set(access_token, access_token)

    return s.Token(access_token=access_token)
9  src/apps/esia/v1/schema.py  Normal file
@@ -0,0 +1,9 @@
from typing import TypedDict


class LoginURL(TypedDict):
    url: str


class Token(TypedDict):
    access_token: str
0  src/apps/users/__init__.py  Normal file
20  src/apps/users/auth.py  Normal file
@@ -0,0 +1,20 @@
from typing import Annotated

from fastapi import Depends
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer

from shared import exceptions as e
from shared.redis import client as cache

BEARER = HTTPBearer()


async def login(
    credentials: Annotated[HTTPAuthorizationCredentials, Depends(BEARER)],
):
    is_exist = cache.get(credentials.credentials)

    if is_exist is None:
        raise e.UnauthorizedException

    return True
0  src/apps/users/v1/__init__.py  Normal file
1113  src/apps/users/v1/mock.py  Normal file
File diff suppressed because it is too large.
152  src/apps/users/v1/router.py  Normal file
@@ -0,0 +1,152 @@
from datetime import datetime
from json import dumps
from logging import getLogger
from typing import Annotated, Any

from fastapi import APIRouter, Body, Depends, status

from apps.users.auth import login
from shared.redis import client as cache

from . import mock

logger = getLogger(__name__)
router = APIRouter(
    prefix='/user',
    tags=[
        'User',
    ],
)


@router.post('/measurement', status_code=status.HTTP_202_ACCEPTED)
async def measurement(
    user: Annotated[str, Depends(login)],
    ad: Annotated[int, Body()],
    sd: Annotated[int, Body()],
    pulse: Annotated[int, Body()],
    created_at: Annotated[datetime, Body()],
    comment: Annotated[str, Body()],
    status: Annotated[str, Body()],
):
    created = created_at.strftime('%Y-%m-%d %H:%M:%S')
    data = {
        'ad': ad,
        'sd': sd,
        'pulse': pulse,
        'created_at': created,
        'comment': comment,
        'status': status,
    }
    cache_key = f'tdn:measurement:{user}:{created}'
    cache.set(cache_key, dumps(data))
    return


@router.get('/measurements')
async def measurements(user: Annotated[str, Depends(login)]):
    data = [cache.get(key) for key in cache.keys(f'tdn:measurement:{user}:*')]
    return data


@router.get('/queue')
async def queue(user: Annotated[bool, Depends(login)]):
    return {
        'id': 60,
        'guid': '92b3343d-1cb2-47b2-8497-a37e38b6ba24',
        'tmk_date': None,
        'created_at': '2025-04-02 15:21:19.890343',
        'code_mo': '166502',
        'mo_name': 'ГАУЗ "ГКБ№7 ИМ. М.Н.САДЫКОВА"',
        'doctor_spec': '109',
        'doctor_snils': None,
        'patient_name': 'Иванов Петр Федорович',
        'patient_birthday': '1997-03-01',
        'patient_snils': '099-678-666 12',
        'patient_policy': None,
        'patient_phone': '+79123456789',
        'patient_email': None,
        'tmk_status': 1,
        'tmk_status_name': 'Создана',
        'tmk_cancel_reason': None,
        'tmk_cancel_reason_name': None,
        'vks_doctor_link': None,
        'vks_patient_link': None,
        'doctor_spec_name': 'врач-терапевт',
    }


@router.get('/getDepartments')
async def get_departments():
    data: dict[Any, Any] = {}
    return data


@router.get('/getSpecs')
async def get_specs():
    return mock.specs


@router.get('/findPat')
async def find_pat(user: Annotated[str, Depends(login)]):
    return mock.findpat[0]


@router.get('/getProfile')
async def get_profile(user: Annotated[str, Depends(login)]):
    return mock.profile[0]


@router.get('/getHosps')
async def get_hosps():
    return mock.hosps


@router.get('/getELNS')
async def get_elns(user: Annotated[str, Depends(login)]):
    return mock.elns[0]


@router.get('/getVaccsReport')
async def get_vaccs_report(user: Annotated[str, Depends(login)]):
    return mock.vacs[0]


@router.get('/getDiagnosticResults')
async def get_diagnostic_results(user: Annotated[str, Depends(login)]):
    return mock.diagnosticResults[0]


@router.get('/getCurrHosp')
async def get_curr_hosp(user: Annotated[str, Depends(login)]):
    return mock.currHosp[0]


@router.get('/getPatFLG')
async def get_pat_flg(user: Annotated[str, Depends(login)]):
    return mock.patFLG[0]


@router.get('/getEntries')
async def get_entries(user: Annotated[str, Depends(login)]):
    return mock.entries[0]


@router.get('/getRoutesList')
async def get_routes_list(user: Annotated[str, Depends(login)]):
    return mock.routesList[0]


@router.get('/getMedExamDict')
async def get_med_exam_dict(user: Annotated[str, Depends(login)]):
    return mock.medexamDict


@router.get('/getHospRecommendations')
async def get_hosp_recommendations(user: Annotated[str, Depends(login)]):
    return mock.hospRecommendations


@router.get('/getHospRoutes')
async def get_hosp_routes(user: Annotated[str, Depends(login)]):
    return mock.hospRoutes
15  src/clients/__init__.py  Normal file
@@ -0,0 +1,15 @@
from .esia.api import ESIA_API


class ClientsObject:
    _esia_api = None

    @property
    def esia_api(self):
        if not self._esia_api:
            self._esia_api = ESIA_API()

        return self._esia_api


clients = ClientsObject()
4  src/clients/esia/__init__.py  Normal file
@@ -0,0 +1,4 @@
from httpx import AsyncClient


class TMKClient(AsyncClient): ...
71  src/clients/esia/api.py  Normal file
@@ -0,0 +1,71 @@
import uuid
from datetime import UTC, datetime
from logging import getLogger
from typing import Any

import jwt
from fastapi import status as st
from httpx import AsyncClient

from apps.esia.scopes import SCOPES
from apps.esia.sign import sign_params
from core.config import settings
from shared import exceptions as e

from . import schema as s


class ESIA_API(AsyncClient):
    def __init__(self):
        self.logger = getLogger(__name__)
        super().__init__(base_url=settings.ESIA_BASE_URL)

    async def sign_request(self, data: dict[str, Any]):
        timestamp = datetime.now(UTC).strftime('%Y.%m.%d %H:%M:%S %z').strip()
        state = str(uuid.uuid4())
        params = {
            'client_id': settings.ESIA_CLIENT_ID,
            'timestamp': timestamp,
            'state': state,
            'scope': ' '.join(SCOPES),
        }
        params.update(data)
        params['client_secret'] = sign_params(params)

        return params

    async def access_token(self, code: str):
        params = {
            'grant_type': 'authorization_code',
            'redirect_uri': settings.ESIA_REDIRECT_URI,
            'code': code,
        }
        signed_params = await self.sign_request(params)
        res = await self.post('/aas/oauth2/te', data=signed_params)

        match res.status_code:
            case st.HTTP_200_OK:
                return s.AccessTokenModel.model_validate(res.json())
            case st.HTTP_400_BAD_REQUEST:
                return None

            case _:
                self.logger.error(res.json())
                raise e.UnknownException

    async def get_user_info(self, access_token: str, id_token: str):
        IDToken = s.IDTokenModel.model_validate(
            jwt.decode(id_token, options={'verify_signature': False})
        )
        res = await self.get(
            f'/rs/prns/{IDToken.urn_esia_sbj.oid}',
            headers={'Authorization': f'Bearer {access_token}'},
        )

        match res.status_code:
            case st.HTTP_200_OK:
                return s.UserInfoModel.model_validate(res.json())

            case _:
                self.logger.error(res.json())
                raise e.UnknownException
60  src/clients/esia/schema.py  Normal file
@@ -0,0 +1,60 @@
from typing import Literal

from pydantic import BaseModel, Field, PositiveInt


class AccessTokenModel(BaseModel):
    access_token: str
    refresh_token: str
    state: str
    id_token: str
    token_type: Literal['Bearer']
    expires_in: PositiveInt


class IDTokenACRModel(BaseModel):
    twoAF: str = Field(alias='2fa')


class IDTokenSBJModel(BaseModel):
    lvl: str = Field(alias='urn:esia:sbj:lvl')
    typ: str = Field(alias='urn:esia:sbj:typ')
    is_tru: bool = Field(alias='urn:esia:sbj:is_tru')
    oid: int = Field(alias='urn:esia:sbj:oid')
    name: str = Field(alias='urn:esia:sbj:nam')


class IDTokenModel(BaseModel):
    aud: str
    sub: int
    nbf: int
    amr: str
    auth_time: int
    exp: int
    iat: int
    iss: str
    # acr: IDTokenACRModel
    urn_esia_amd: str = Field(alias='urn:esia:amd')
    urn_esia_sid: str = Field(alias='urn:esia:sid')
    urn_esia_sbj: IDTokenSBJModel = Field(alias='urn:esia:sbj')


class UserInfoModel(BaseModel):
    stateFacts: list[str]
    firstName: str
    lastName: str
    # middleName: str
    # birthDate: str
    # gender: str
    trusted: bool
    # citizenship: str
    snils: str
    inn: int
    updatedOn: int
    rfgUOperatorCheck: bool
    status: str
    verifying: bool
    rIdDoc: int
    containsUpCfmCode: bool
    kidAccCreatedByParent: bool
    eTag: str
0  src/core/__init__.py  Normal file
62  src/core/config.py  Normal file
@@ -0,0 +1,62 @@
from os import environ
from os.path import exists

from pydantic import Field, model_validator
from pydantic_extra_types.semantic_version import SemanticVersion
from pydantic_settings import BaseSettings, SettingsConfigDict


def get_version():
    if exists('.version'):
        with open('.version', encoding='utf-8') as f:
            return SemanticVersion.parse(f.read().strip())

    return SemanticVersion.parse('0.0.0')


class Settings(BaseSettings):
    model_config = SettingsConfigDict(
        env_file='.env',
        validate_default=False,
        extra='ignore',
    )

    # App info
    APP_NAME: str = 'Hospital Assistant API'
    APP_DESCRIPTION: str = 'API for the Hospital Assistant'
    APP_PORT: int = Field(default=6767)
    VERSION: SemanticVersion = Field(default_factory=get_version)
    DEBUG: bool = Field(default=False)

    # Security
    SECRET_KEY: str = Field(default='secret')
    ALGORITHM: str = 'HS256'

    # Database
    DATABASE_URL: str = Field(default='sqlite:///sql.db')

    # Redis
    REDIS_URL: str = Field(default='redis://localhost:6379/0')

    # Loki Logging
    LOKI_URL: str | None = Field(default=None)

    # Environment
    TMK_BASE_URL: str = Field(default='https://tmk-api.tatar.ru/api')

    # ESIA
    ESIA_BASE_URL: str = Field(default='https://esia.gosuslugi.ru')
    ESIA_CLIENT_ID: str = Field(default='')
    ESIA_REDIRECT_URI: str = Field(default='')
    ESIA_CONTAINER_PASSWORD: str = Field(default='')
    ESIA_CONTAINER_THUMBPRINT: str = Field(default='')

    @model_validator(mode='after')
    def celery_env(self):
        environ['CELERY_BROKER_URL'] = self.REDIS_URL
        environ['CELERY_RESULT_BACKEND'] = self.REDIS_URL

        return self


settings = Settings()
54  src/core/exceptions.py  Normal file
@@ -0,0 +1,54 @@
from logging import getLogger

from fastapi import FastAPI, Request, Response, status
from fastapi.encoders import jsonable_encoder
from fastapi.exceptions import (
    RequestValidationError,
    WebSocketRequestValidationError,
)
from fastapi.responses import ORJSONResponse
from fastapi.utils import is_body_allowed_for_status_code
from fastapi.websockets import WebSocket
from starlette.exceptions import HTTPException

logger = getLogger(__name__)
logger_format = '%s: %s'


def register_exceptions(app: FastAPI):
    @app.exception_handler(HTTPException)
    async def http_exception_handler(request: Request, exc: HTTPException):  # type: ignore
        headers = getattr(exc, 'headers', None)
        if not is_body_allowed_for_status_code(exc.status_code):
            return Response(status_code=exc.status_code, headers=headers)
        return ORJSONResponse(
            status_code=exc.status_code,
            content={'detail': exc.detail},
            headers=headers,
        )

    @app.exception_handler(RequestValidationError)
    async def validation_exception_handler(  # type: ignore
        request: Request,
        exc: RequestValidationError,
    ):
        logger.warning(logger_format, 'Validation Error', exc.body)
        return ORJSONResponse(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            content=jsonable_encoder({'detail': exc.errors()}),
        )

    @app.exception_handler(WebSocketRequestValidationError)
    async def websocket_validation_exception_handler(  # type: ignore
        websocket: WebSocket,
        exc: WebSocketRequestValidationError,
    ):
        logger.warning(
            logger_format, 'WebSocket Validation Error', exc.errors()
        )
        return await websocket.close(
            code=status.WS_1008_POLICY_VIOLATION,
            reason=jsonable_encoder(exc.errors()),
        )

    return app
67  src/core/log.py  Normal file
@@ -0,0 +1,67 @@
import logging
from typing import Any

from logging_loki import LokiHandler as Loki  # type: ignore

from core.config import settings


class LokiHandler(Loki):
    def __init__(self):
        if not settings.LOKI_URL:
            msg = 'LOKI_URL is not set'
            raise ValueError(msg)

        super().__init__(  # type: ignore
            settings.LOKI_URL,
            tags={
                'application': settings.APP_NAME,
                'version': str(settings.VERSION),
            },
            version='1',
        )


class Config:
    def __init__(self):
        self.version = 1
        self.disable_existing_loggers = False
        self.handlers = self._get_handlers()
        self.loggers = self._get_loggers()

    @staticmethod
    def _get_handlers():
        handlers: dict[str, Any] = {
            'console': {
                'class': 'logging.StreamHandler',
                'level': logging.INFO,
                'stream': 'ext://sys.stderr',
            }
        }

        if settings.LOKI_URL:
            handlers['loki'] = {'class': LokiHandler}

        return handlers

    def _get_loggers(self):
        loggers = {
            '': {
                'level': logging.INFO,
                'handlers': list(self.handlers.keys()),
                'propagate': False,
            },
        }

        return loggers

    def render(self):
        return {
            'version': self.version,
            'disable_existing_loggers': self.disable_existing_loggers,
            'handlers': self.handlers,
            'loggers': self.loggers,
        }


config = Config().render()
29  src/core/main.py  Normal file
@@ -0,0 +1,29 @@
from logging import getLogger

from fastapi import FastAPI
from fastapi.responses import ORJSONResponse

# from database import lifespan
from middlewares import register_middlewares

from .config import settings
from .exceptions import register_exceptions
from .routers.v1 import router as v1_router

logger = getLogger(__name__)

app = FastAPI(
    debug=settings.DEBUG,
    title=settings.APP_NAME,
    description=settings.APP_DESCRIPTION,
    version=str(settings.VERSION),
    openapi_url=None,
    default_response_class=ORJSONResponse,
    # lifespan=lifespan,
    docs_url=None,
    redoc_url=None,
)

app = register_middlewares(app)
app = register_exceptions(app)
app.include_router(v1_router)
40  src/core/routers/__init__.py  Normal file
@@ -0,0 +1,40 @@
from fastapi import APIRouter
from fastapi.openapi.docs import get_swagger_ui_html
from fastapi.openapi.utils import get_openapi
from fastapi.responses import ORJSONResponse

from core.config import settings


def get_openapi_schema(router: APIRouter):
    # if not settings.DEBUG:
    #     return None

    return ORJSONResponse(
        get_openapi(
            title=settings.APP_NAME,
            version=str(settings.VERSION),
            description=settings.APP_DESCRIPTION,
            routes=router.routes,
            servers=[
                {
                    'url': '/',
                    'description': 'Development environment',
                },
                {
                    'url': 'https://med-assistant-api.tatar.ru/',
                    'description': 'Production environment',
                },
            ],
        )
    )


def get_swagger_html(router: APIRouter):
    # if not settings.DEBUG:
    #     return None

    return get_swagger_ui_html(
        openapi_url=f'{router.prefix}/openapi.json',
        title='Docs',
    )
30  src/core/routers/v1.py  Normal file
@@ -0,0 +1,30 @@
from fastapi import APIRouter, HTTPException

from apps.esia.v1.router import router as esia_router
from apps.users.v1.router import router as users_router

from . import get_openapi_schema, get_swagger_html

router = APIRouter(prefix='/v1')

router.include_router(esia_router)
router.include_router(users_router)

openapi_schema = get_openapi_schema(router)
swagger_ui_html = get_swagger_html(router)


@router.get('/openapi.json', include_in_schema=False)
async def openapi():
    if openapi_schema is None:
        raise HTTPException(status_code=404)

    return openapi_schema


@router.get('/docs', include_in_schema=False)
async def docs():
    if swagger_ui_html is None:
        raise HTTPException(status_code=404)

    return swagger_ui_html
34  src/database/__init__.py  Normal file
@@ -0,0 +1,34 @@
from contextlib import asynccontextmanager
from logging import getLogger
from typing import Annotated

from alembic.command import upgrade
from alembic.config import Config
from fastapi import Depends, FastAPI
from sqlalchemy.ext.asyncio import AsyncSession
from sqlmodel import Session

from core.config import settings
from database.manager import DBManager

logger = getLogger(__name__)
db_manager = DBManager(settings.DATABASE_URL)

SyncSessionDep = Annotated[Session, Depends(db_manager.sync_session)]
AsyncSessionDep = Annotated[
    AsyncSession,
    Depends(db_manager.async_session),
]


@asynccontextmanager
async def lifespan(app: FastAPI):
    log_format = '%s: %s'
    logger.info(log_format, 'App Name', settings.APP_NAME)
    logger.info(log_format, 'App Description', settings.APP_DESCRIPTION)
    logger.info(log_format, 'App Version', settings.VERSION)

    config = Config('alembic.ini')
    upgrade(config, 'head')

    yield
41  src/database/manager.py  Normal file
@@ -0,0 +1,41 @@
from contextlib import asynccontextmanager, contextmanager

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlmodel import Session, create_engine


class DBManager:
    sync_url = ''
    async_url = ''

    def __init__(self, database_url: str):
        self.sync_url, self.async_url = self._initialize_urls(database_url)

        self.sync_engine = create_engine(self.sync_url)
        self.async_engine = create_async_engine(self.async_url)

    def _initialize_urls(self, database_url: str):
        url_parts = database_url.split('://')

        return (
            f'postgresql+psycopg://{url_parts[1]}',
            f'postgresql+asyncpg://{url_parts[1]}',
        )

    def sync_session(self):
        with Session(self.sync_engine) as session:
            yield session

    async def async_session(self):
        async with AsyncSession(self.async_engine) as session:
            yield session

    @contextmanager
    def sync_context_session(self):
        with Session(self.sync_engine) as session:
            yield session

    @asynccontextmanager
    async def async_context_session(self):
        async with AsyncSession(self.async_engine) as session:
            yield session
20  src/middlewares/__init__.py  Normal file
@@ -0,0 +1,20 @@
from logging import getLogger

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from .access_log_middleware import AccessLogMiddleware

logger = getLogger(__name__)


def register_middlewares(app: FastAPI):
    app.add_middleware(AccessLogMiddleware)
    app.add_middleware(
        CORSMiddleware,
        allow_origins=['*'],
        allow_methods=['*'],
        allow_headers=['*'],
    )

    return app
108  src/middlewares/access_log_middleware.py  Normal file
@@ -0,0 +1,108 @@
from logging import getLogger
from re import findall
from time import perf_counter

from starlette.types import ASGIApp, Message, Receive, Scope, Send

from core.config import settings

LOCALHOST = '127.0.0.1'
BROWSERS = {
    'firefox': 'Firefox',
    'yabrowser': 'Yandex',
    'samsungbrowser': 'Samsung Internet',
    'trident': 'Internet Explorer',
    'opera': 'Opera',
    'vivaldi': 'Vivaldi',
    'brave': 'Brave',
    'edg': 'Edge',
    'chrome': 'Chrome',
    'safari': 'Safari',
    'chromium': 'Chromium',
    'msie': 'Internet Explorer',
}


class AccessLogMiddleware:
    def __init__(self, app: ASGIApp):
        self.app = app
        self.logger = getLogger(__name__)

        self.version = (
            b'Version',
            f'{settings.VERSION}'.encode(),
        )

    async def detect_browser(self, headers: dict[bytes, bytes]):
        if b'user-agent' not in headers:
            return 'unknown'

        user_agent = headers[b'user-agent'].decode().lower()

        for k, v in BROWSERS.items():
            if findall(k, user_agent):
                return v

        return 'unknown'

    @staticmethod
    async def get_client_ip(
        headers: dict[bytes, bytes],
        default_ip: str = LOCALHOST,
    ):
        if b'x-forwarded-for' not in headers:
            return default_ip

        ips = headers[b'x-forwarded-for'].decode().split(',')

        if len(ips) > 1:
            return ips[-1].strip()

        return ips[0]

    async def __call__(self, scope: Scope, receive: Receive, send: Send):
        if scope['type'] != 'http':
            return await self.app(scope, receive, send)

        start_time = perf_counter()

        async def send_wrapper(message: Message) -> None:
            if message['type'] != 'http.response.start':
                return await send(message)

            headers = dict(scope.get('headers', []))

            client_ip = await self.get_client_ip(headers, scope['client'][0])
            browser = await self.detect_browser(headers)

            response_time = (perf_counter() - start_time) * 1000
            response_data = f'dur={response_time:.2f}'
            response = (
                b'Server-Timing',
                f'resp;{response_data};desc="Response Time"'.encode(),
            )

            message['headers'] = message['headers'] + [response, self.version]

            self.logger.info(
                '%s - %s %s %d [%0.2fms]',
                client_ip,
                scope['method'],
                scope['path'],
                message['status'],
                response_time,
                extra={
                    'tags': {
                        'method': scope['method'],
                        'path': scope['path'],
                        'status': message['status'],
                        'response_time': response_time,
                        'client_ip': client_ip,
                        'browser': browser,
                    },
                },
            )

            await send(message)

        return await self.app(scope, receive, send_wrapper)
1  src/migrations/README  Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
0  src/migrations/__init__.py  Normal file
53  src/migrations/env.py  Normal file
@@ -0,0 +1,53 @@
from logging.config import dictConfig

from alembic import context
from sqlalchemy import engine_from_config, pool
from sqlmodel import SQLModel

from core.log import config as log_config
from database import db_manager

dictConfig(log_config)


config = context.config
url = db_manager.sync_url
target_metadata = SQLModel.metadata


def run_migrations_offline() -> None:
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={'paramstyle': 'named'},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    cfg = config.get_section(config.config_ini_section, {})
    cfg['sqlalchemy.url'] = url
    connectable = engine_from_config(
        cfg,
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=True,
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
28  src/migrations/script.py.mako  Normal file
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
22  src/server.py  Normal file
@@ -0,0 +1,22 @@
from uvicorn import Config, Server

from core.config import settings
from core.log import config as log_config


def main():
    config = Config(
        'core.main:app',
        host='0.0.0.0',
        port=settings.APP_PORT,
        log_config=log_config,
        log_level='info',
        reload=settings.DEBUG,
        access_log=False,
    )
    server = Server(config)
    server.run()


if __name__ == '__main__':
    main()
0  src/shared/__init__.py  Normal file
66  src/shared/exceptions.py  Normal file
@@ -0,0 +1,66 @@
from fastapi import HTTPException, status


class BasicException(HTTPException):
    base_status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR
    base_detail: str = 'Something went wrong'

    def __init__(
        self, *, status_code: int | None = None, detail: str | None = None
    ):
        status_code = status_code or self.base_status_code
        detail = detail or self.base_detail
        super().__init__(status_code=status_code, detail=detail)

    @classmethod
    def description(cls, detail: str | None = None):
        return {
            'description': detail or cls.base_detail,
            'content': {
                'application/json': {
                    'example': {
                        'detail': detail or cls.base_detail,
                    }
                }
            },
        }


class BadRequestException(BasicException):
    base_status_code: int = status.HTTP_400_BAD_REQUEST
    base_detail: str = 'Bad Request'


class UnauthorizedException(BasicException):
    base_status_code: int = status.HTTP_401_UNAUTHORIZED
    base_detail: str = 'Unauthorized'


class ForbiddenException(BasicException):
    base_status_code: int = status.HTTP_403_FORBIDDEN
    base_detail: str = 'Forbidden'


class NotFoundException(BasicException):
    base_status_code: int = status.HTTP_404_NOT_FOUND
    base_detail: str = 'Not Found'


class ConflictException(BasicException):
    base_status_code: int = status.HTTP_409_CONFLICT
    base_detail: str = 'Conflict'


class TooManyRequestsException(BasicException):
    base_status_code: int = status.HTTP_429_TOO_MANY_REQUESTS
    base_detail: str = 'Too Many Requests'


class InternalServerErrorException(BasicException):
    base_status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR
    base_detail: str = 'Internal Server Error'


class UnknownException(BasicException):
    base_status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR
    base_detail: str = 'Unknown error'
5  src/shared/redis.py  Normal file
@@ -0,0 +1,5 @@
from redis import Redis

from core.config import settings

client = Redis.from_url(settings.REDIS_URL)