first commit

This commit is contained in:
2025-06-08 15:42:25 +03:00
commit 752a775386
49 changed files with 1754 additions and 0 deletions

28
.dockerignore Normal file
View File

@ -0,0 +1,28 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info
# Virtual environments
.venv
# UV
uv.lock
# Environment variables
.env
# Ruff
.ruff_cache
# Pytest
.pytest_cache
# SQLite
*.db
# Postgres
postgres

21
.env.example Normal file
View File

@ -0,0 +1,21 @@
# Port
APP_PORT=9000 # Optional
# Application
SECRET_KEY=secret_key
# Database
DATABASE_URL=postgresql://postgres:example@localhost:5432/postgres
POSTGRES_PORT=5432 # Optional
POSTGRES_DATA=./postgres # Optional
# Valkey
REDIS_URL=redis://localhost:6379/0
VALKEY_PORT=6379 # Optional
# VK
VK_CLIENT_ID=
VK_REDIRECT_URI=
# Logging (Optional)
LOKI_URL=

28
.gitignore vendored Normal file
View File

@ -0,0 +1,28 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info
# Virtual environments
.venv
# UV
uv.lock
# Environment variables
.env
# Ruff
.ruff_cache
# Pytest
.pytest_cache
# SQLite
*.db
# Postgres
postgres

34
.pre-commit-config.yaml Normal file
View File

@ -0,0 +1,34 @@
repos:
- repo: https://github.com/crate-ci/typos
rev: v1.31.1
hooks:
- id: typos
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.2
hooks:
- id: ruff
args: [ --fix ]
- id: ruff-format
- repo: https://github.com/RobertCraigie/pyright-python
rev: v1.1.398
hooks:
- id: pyright
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: trailing-whitespace
- id: check-docstring-first
- id: check-added-large-files
- id: check-yaml
- id: debug-statements
- id: check-merge-conflict
- id: double-quote-string-fixer
- id: end-of-file-fixer
- repo: meta
hooks:
- id: check-hooks-apply
- id: check-useless-excludes

53
Dockerfile Normal file
View File

@ -0,0 +1,53 @@
#################################################
# Stage 1: shared base — system libs, unprivileged user, uv/python env.
FROM debian:bookworm-slim AS builder-base
RUN apt-get update && \
    apt-get install --no-install-recommends -y \
    libpq-dev \
    ca-certificates \
    && groupadd --gid 1001 appuser \
    && useradd --uid 1001 --gid appuser --shell /bin/bash --create-home appuser
# Pinned uv + CPython versions; interpreter lives inside /app so it can be
# copied wholesale into the production stage.
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    UV_VERSION="0.7.12" \
    UV_PYTHON="3.13.4" \
    UV_PYTHON_INSTALL_DIR="/app/.python" \
    UV_PYTHON_PREFERENCE="only-managed" \
    UV_COMPILE_BYTECODE=1 \
    UV_NO_INSTALLER_METADATA=1 \
    UV_LINK_MODE=copy \
    PATH="$PATH:/root/.local/bin/:/app/.venv/bin"
#################################################
# Stage 2: build stage — install uv, the managed interpreter and deps.
FROM builder-base AS python-base
WORKDIR /app
RUN apt-get install --no-install-recommends -y \
    curl \
    clang \
    && curl -LsSf https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/uv-installer.sh | sh \
    && uv python install
COPY pyproject.toml ./
# NOTE(review): uv.lock is listed in .dockerignore/.gitignore, so this sync
# resolves fresh each build — confirm non-reproducible builds are intended.
RUN uv sync --no-dev -n
# Record the project version for Settings.get_version().
RUN uv version --short > .version
#################################################
# Stage 3: runtime — copy interpreter + venv + sources, run unprivileged.
FROM builder-base AS production
WORKDIR /app
RUN chown -R appuser:appuser /app
COPY --from=python-base /app/.python /app/.python
COPY --from=python-base /app/.venv /app/.venv
COPY --from=python-base /app/.version /app/
COPY /src/ /app/
COPY /scripts/ /app/scripts
RUN chmod -R 755 /app/scripts
USER appuser
CMD ["sh", "./scripts/boot.sh"]
#################################################

0
README.md Normal file
View File

81
docker-compose.yml Normal file
View File

@ -0,0 +1,81 @@
name: VKResenderAPI
x-app-common: &app-common
build:
context: .
target: production
tty: true
restart: unless-stopped
stop_signal: SIGINT
env_file:
- .env
environment:
DEBUG: true
DATABASE_URL: "postgresql://postgres:example@pgbouncer:5432/postgres"
REDIS_URL: "redis://valkey:6379/0"
depends_on:
pgbouncer:
condition: service_healthy
valkey:
condition: service_healthy
services:
db:
image: postgres:17.2-alpine
restart: unless-stopped
ports:
- ${POSTGRES_PORT:-5432}:5432
environment:
POSTGRES_PASSWORD: example
volumes:
- "${POSTGRES_DATA:-./postgres}:/var/lib/postgresql/data/"
healthcheck:
test: [ "CMD-SHELL", "pg_isready -U postgres" ]
interval: 5s
timeout: 10s
retries: 5
pgbouncer:
image: edoburu/pgbouncer:latest
restart: unless-stopped
environment:
- POOL_MODE=transaction
- MAX_DB_CONNECTIONS=3000
- DEFAULT_POOL_SIZE=100
- AUTH_TYPE=md5
- AUTH_FILE=/etc/pgbouncer/userlist.txt
- DATABASE_URL=postgresql://postgres:example@db:5432/postgres
volumes:
- ./userlist.txt:/etc/pgbouncer/userlist.txt
healthcheck:
test: ["CMD", "pg_isready", "-h", "127.0.0.1", "-p", "5432"]
interval: 10s
timeout: 5s
retries: 3
depends_on:
db:
condition: service_healthy
valkey:
image: valkey/valkey:alpine
restart: unless-stopped
ports:
- ${VALKEY_PORT:-6379}:6379
healthcheck:
test: [ "CMD", "redis-cli", "ping" ]
interval: 5s
timeout: 10s
retries: 5
web:
<<: *app-common
ports:
- "${APP_PORT:-9000}:${APP_PORT:-9000}"
worker:
<<: *app-common
command: celery -A core.worker worker --autoscale=4,0 -E --loglevel=info
beat:
<<: *app-common
command: celery -A core.worker beat --loglevel=info

123
pyproject.toml Normal file
View File

@ -0,0 +1,123 @@
[project]
name = "VKResenderAPI"
version = "0.1.0"
description = "API for the VK Resender"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
# Server
"celery==5.5.3",
"fastapi==0.115.12",
"gunicorn==23.0.0",
"uvicorn-worker==0.3.0",
"uvicorn[standard]==0.34.3",
"orjson==3.10.18",
"redis[hiredis]==6.2.0",
# Logging & Metrics
"python-logging-loki==0.3.1",
# Requests
"httpx==0.28.1",
"aiohttp[speedups]==3.12.11",
# Database
"alembic==1.16.1",
"asyncpg==0.30.0",
"psycopg==3.2.9",
"psycopg-c==3.2.9; sys_platform != 'win32'",
"sqlmodel==0.0.24",
# Types
"pydantic[email]==2.11.5",
"pydantic-extra-types==2.10.5",
"pydantic-settings==2.9.1",
"semver==3.0.4",
# CLI
"typer-slim==0.16.0",
]
[dependency-groups]
dev = [
"celery-types==0.23.0",
"poethepoet==0.34.0",
"pre-commit==4.2.0",
"psycopg[binary]==3.2.9",
"pyright==1.1.401",
"ruff==0.11.13",
"types-pyjwt==1.7.1",
"types-redis==4.6.0.20241004",
]
[tool.poe.tasks]
_git = "git add ."
_lint = "pre-commit run --all-files"
lint = ["_git", "_lint"]
check = "uv pip list --outdated"
run = "uv run --directory ./src/ server.py"
manage = "uv run --directory ./src/ manage.py"
migrate = "uv run --directory ./src/ alembic revision --autogenerate"
[tool.uv]
required-version = ">=0.7.0"
dependency-metadata = [
{ name = "psycopg-c", version = "3.2.9", python = ">=3.8", requires = [
"psycopg==3.2.9",
] },
]
[tool.typos.files]
extend-exclude = ["**/migrations/versions"]
[tool.pyright]
venvPath = "."
venv = ".venv"
exclude = ["**/migrations/versions"]
strictListInference = true
strictDictionaryInference = true
strictSetInference = true
deprecateTypingAliases = true
typeCheckingMode = "strict"
pythonPlatform = "All"
[tool.ruff]
target-version = "py313"
line-length = 79
fix = true
[tool.ruff.lint]
preview = true
select = [
"E",
"W",
"F",
"UP",
"A",
"B",
"C4",
"SIM",
"I",
"S",
"G",
"FAST",
"ASYNC",
"BLE",
"INT",
"ISC",
"ICN",
"PYI",
"INP",
"RSE",
"PIE",
"SLOT",
"TID",
"LOG",
"FBT",
"DTZ",
"EM",
"PERF",
"RUF",
]
ignore = ["RUF029", "S104"]
[tool.ruff.format]
quote-style = "single"
indent-style = "space"
docstring-code-format = true

4
scripts/boot.sh Normal file
View File

@ -0,0 +1,4 @@
#!/bin/bash
# Container entrypoint: apply pending DB migrations, then start the server.
# Abort on any failure so the container never serves against a stale schema.
set -euo pipefail
python -m alembic upgrade head
# exec replaces the shell so gunicorn receives container signals (the
# compose file stops the service with SIGINT) directly.
exec python -m gunicorn

6
src/alembic.ini Normal file
View File

@ -0,0 +1,6 @@
[alembic]
file_template = %%(year)d.%%(month).2d.%%(day).2d_%%(hour).2d-%%(minute).2d-%%(second).2d_%%(rev)s
script_location = migrations
prepend_sys_path = .
version_path_separator = os
output_encoding = utf-8

0
src/apps/__init__.py Normal file
View File

0
src/apps/vk/__init__.py Normal file
View File

1
src/apps/vk/constants.py Normal file
View File

@ -0,0 +1 @@
# OAuth scopes requested from VK ID: wall posting, community access, photo upload.
SCOPES = ['wall', 'groups', 'photos']

22
src/apps/vk/models.py Normal file
View File

@ -0,0 +1,22 @@
from datetime import datetime

from sqlmodel import Field, SQLModel


class User(SQLModel, table=True):
    """VK user profile row (table ``vk_users``)."""

    __tablename__ = 'vk_users'  # type: ignore

    # VK's own numeric user id doubles as the primary key.
    id: int = Field(primary_key=True)
    first_name: str | None = Field(default=None)
    last_name: str | None = Field(default=None)
    screen_name: str | None = Field(default=None)


class Tokens(SQLModel, table=True):
    """OAuth token pair for a VK user — one row per user (table ``vk_tokens``)."""

    __tablename__ = 'vk_tokens'  # type: ignore

    user_id: int = Field(foreign_key='vk_users.id', primary_key=True)
    access_token: str = Field()
    refresh_token: str = Field()
    # Device identifier issued by VK ID; required when refreshing tokens.
    device_id: str = Field()
    # Expiry of the access token only; the refresh token outlives it.
    expires_at: datetime = Field()

24
src/clients/__init__.py Normal file
View File

@ -0,0 +1,24 @@
from .vk.api import VKAPIClient
from .vk.auth import VKAuthClient
class ClientsObject:
    """Process-wide container that lazily builds the shared API clients."""

    _vk_api = None
    _vk_auth = None

    @property
    def vk_api(self):
        """Return the shared ``VKAPIClient``, creating it on first access."""
        if not self._vk_api:
            self._vk_api = VKAPIClient()
        return self._vk_api

    @property
    def vk_auth(self):
        """Return the shared ``VKAuthClient``, creating it on first access."""
        if not self._vk_auth:
            self._vk_auth = VKAuthClient()
        return self._vk_auth


# Module-level singleton used throughout the app.
clients = ClientsObject()

View File

View File

@ -0,0 +1,61 @@
from collections.abc import Callable, Mapping
from logging import getLogger
from ssl import SSLContext
from typing import Any
from httpx import URL, Limits
from httpx import AsyncClient as AsyncHTTPXClient
from httpx import _config as c # type: ignore
from httpx import _types as t # type: ignore
from clients.base.transports._aiohttp import AiohttpTransport
from clients.base.transports._cache import AsyncCacheTransport
from clients.base.transports._rate_limit import AsyncRateLimit
class AsyncClient(AsyncHTTPXClient):
    """httpx AsyncClient wired to the project's aiohttp-backed transport.

    The transport stack (AsyncCacheTransport → AsyncRateLimit →
    AiohttpTransport) adds Redis response caching and optional rate
    limiting; ``key``/``limit`` configure the latter and must be given
    together.
    """

    def __init__(
        self,
        *,
        auth: t.AuthTypes | None = None,
        params: t.QueryParamTypes | None = None,
        headers: t.HeaderTypes | None = None,
        cookies: t.CookieTypes | None = None,
        verify: SSLContext | str | bool = True,
        cert: t.CertTypes | None = None,
        proxy: t.ProxyTypes | None = None,
        mounts: Mapping[str, AiohttpTransport | AsyncRateLimit | None]
        | None = None,
        timeout: t.TimeoutTypes = c.DEFAULT_TIMEOUT_CONFIG,
        follow_redirects: bool = False,
        limits: Limits = c.DEFAULT_LIMITS,
        max_redirects: int = c.DEFAULT_MAX_REDIRECTS,
        event_hooks: Mapping[str, list[Callable[..., Any]]] | None = None,
        base_url: URL | str = '',
        trust_env: bool = True,
        default_encoding: str | Callable[[bytes], str] = 'utf-8',
        key: str | None = None,  # Redis key used for rate limiting
        limit: int | None = None,  # max requests/minute under that key
    ) -> None:
        super().__init__(
            auth=auth,
            params=params,
            headers=headers,
            cookies=cookies,
            verify=verify,
            cert=cert,
            # The aiohttp transport speaks HTTP/1.1 only.
            http1=True,
            http2=False,
            proxy=proxy,
            mounts=mounts,
            timeout=timeout,
            follow_redirects=follow_redirects,
            limits=limits,
            max_redirects=max_redirects,
            event_hooks=event_hooks,
            base_url=base_url,
            transport=AsyncCacheTransport(key, limit),
            trust_env=trust_env,
            default_encoding=default_encoding,
        )
        self.logger = getLogger(__name__)

View File

@ -0,0 +1,9 @@
from typing import Any
from httpx import Response as HTTPXResponse
from orjson import loads
class Response(HTTPXResponse):
    """httpx Response whose ``json()`` parses the body with orjson."""

    def json(self, **kwargs: Any):
        """Decode the response body via ``orjson.loads``.

        NOTE(review): orjson.loads accepts no keyword arguments, so any
        kwargs forwarded here (httpx's json() signature allows them)
        will raise TypeError — confirm callers never pass them.
        """
        return loads(self.content, **kwargs)

View File

View File

@ -0,0 +1,102 @@
import asyncio
from logging import getLogger
from types import TracebackType
import aiohttp
import httpx
from clients.base.responses import Response
# Maps aiohttp exception types to their closest httpx equivalents.
# map_aiohttp_exception() walks this mapping top-to-bottom with
# isinstance(), so entries must be ordered most-specific first: the
# previous ordering listed aiohttp.ClientError (the common base class)
# first, which matched every client error and made all finer-grained
# mappings below it unreachable.
EXCEPTIONS = {
    aiohttp.ClientConnectorCertificateError: httpx.ProtocolError,
    aiohttp.ServerFingerprintMismatch: httpx.ProtocolError,
    aiohttp.ClientSSLError: httpx.ProtocolError,
    aiohttp.ClientConnectorDNSError: httpx.ConnectError,
    aiohttp.ClientProxyConnectionError: httpx.ProxyError,
    aiohttp.ClientConnectorError: httpx.ConnectError,
    aiohttp.ClientConnectionResetError: httpx.ConnectError,
    aiohttp.ClientOSError: httpx.ConnectError,
    aiohttp.ServerDisconnectedError: httpx.ReadError,
    aiohttp.ClientConnectionError: httpx.NetworkError,
    aiohttp.TooManyRedirects: httpx.TooManyRedirects,
    aiohttp.ContentTypeError: httpx.DecodingError,
    aiohttp.ClientResponseError: httpx.HTTPStatusError,
    aiohttp.ClientPayloadError: httpx.ReadError,
    aiohttp.InvalidURL: httpx.InvalidURL,
    aiohttp.ClientError: httpx.RequestError,
}
class AiohttpTransport(httpx.AsyncBaseTransport):
    """httpx async transport that performs requests with an aiohttp session."""

    def __init__(
        self,
        session: aiohttp.ClientSession | None = None,
    ):
        self.logger = getLogger(__name__)
        # Own a fresh session when the caller doesn't supply one.
        self._session = session or aiohttp.ClientSession()
        self._closed = False

    def map_aiohttp_exception(self, exc: Exception):
        """Translate an aiohttp/asyncio error into its httpx counterpart.

        NOTE(review): httpx.HTTPStatusError requires ``request`` and
        ``response`` keyword arguments, so mapping ClientResponseError
        through the generic ``httpx_exc(message=...)`` call would raise
        TypeError at construction — confirm and special-case it.
        """
        for aiohttp_exc, httpx_exc in EXCEPTIONS.items():
            if isinstance(exc, aiohttp_exc):
                return httpx_exc(message=str(exc))  # type: ignore
        if isinstance(exc, asyncio.TimeoutError):
            return httpx.TimeoutException(str(exc))
        return httpx.HTTPError(f'Unknown error: {exc!s}')

    async def __aenter__(self):
        await self._session.__aenter__()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ):
        await self._session.__aexit__(exc_type, exc_value, traceback)
        self._closed = True

    async def aclose(self):
        # Idempotent close; httpx calls this from AsyncClient.aclose().
        if not self._closed:
            self._closed = True
            await self._session.close()

    async def handle_async_request(self, request: httpx.Request):
        """Send *request* via aiohttp and adapt the reply to a Response.

        Raises the mapped httpx exception on any aiohttp/asyncio failure.
        """
        if self._closed:
            msg = 'Transport is closed'
            raise RuntimeError(msg)
        try:
            headers = dict(request.headers)
            method = request.method
            url = str(request.url)
            content = request.content
            async with self._session.request(
                method=method,
                url=url,
                headers=headers,
                data=content,
                allow_redirects=True,
            ) as aiohttp_response:
                content = await aiohttp_response.read()
                # aiohttp has already decompressed the body; drop the
                # content-encoding header so httpx doesn't re-decode it.
                httpx_headers = [
                    (k.lower(), v)
                    for k, v in aiohttp_response.headers.items()
                    if k.lower() != 'content-encoding'
                ]
                return Response(
                    status_code=aiohttp_response.status,
                    headers=httpx_headers,
                    content=content,
                    request=request,
                )
        except Exception as e:
            exception = self.map_aiohttp_exception(e)
            self.logger.error(exception)
            raise exception from e

View File

@ -0,0 +1,84 @@
from httpx import Request
from httpx import Response as HTTPXResponse
from httpx import _models as m # type: ignore
from orjson import dumps, loads
from clients.base.responses import Response
from shared.redis import client
from ._rate_limit import AsyncRateLimit
def generate_cache_key(request: 'Request') -> str:
    """Derive a deterministic Redis cache key from the request method and URL.

    The previous implementation used the built-in ``hash()`` on the
    serialized request, but ``hash()`` of a string is salted per process
    (PYTHONHASHSEED), so every gunicorn worker produced different keys
    and the shared Redis cache never hit across processes. A sha256
    digest is stable everywhere.
    """
    from hashlib import sha256
    from json import dumps as json_dumps

    request_data = {
        'method': request.method,
        'url': str(request.url),
    }
    payload = json_dumps(request_data, sort_keys=True).encode()
    return f'cache:{sha256(payload).hexdigest()}'
def cache_response(
    cache_key: str,
    request: Request,
    response: Response | HTTPXResponse,
) -> None:
    """Store the serialized response in Redis when the request opts in.

    Caching only happens when the *request* carried an ``X-Cache-TTL``
    header; its integer value becomes the Redis expiry in seconds.
    NOTE(review): the redis call is synchronous and runs on the async
    request path — confirm the blocking is acceptable.
    """
    serialized_response = serialize_response(response)
    ttl = get_ttl_from_headers(request.headers)
    if ttl:
        client.set(cache_key, serialized_response, ex=ttl)
def get_ttl_from_headers(headers: m.Headers):
if 'X-Cache-TTL' in headers:
try:
return int(headers['X-Cache-TTL'])
except (ValueError, TypeError):
return None
def get_cached_response(cache_key: str):
    """Return the deserialized cached Response for *cache_key*, or None.

    NOTE(review): synchronous redis read on the async path — see
    cache_response.
    """
    cached_data = client.get(cache_key)
    if cached_data:
        return deserialize_response(cached_data)
    return None
def serialize_response(response: Response | HTTPXResponse):
    """Pack status, headers and decoded body into orjson bytes for Redis."""
    return dumps({
        'status_code': response.status_code,
        'headers': dict(response.headers),
        'content': response.content.decode(),
    })
def deserialize_response(serialized_response: bytes):
    """Rebuild a Response from bytes produced by ``serialize_response``."""
    data = loads(serialized_response)
    status_code = data['status_code']
    headers = data['headers']
    body = data['content'].encode()
    return Response(status_code=status_code, headers=headers, content=body)
class AsyncCacheTransport(AsyncRateLimit):
    """Transport layer that serves and stores responses via the Redis cache.

    NOTE(review): this inherits AsyncRateLimit but never calls
    ``super().__init__()`` — it composes a fresh AsyncRateLimit instead,
    so inherited attributes (key/limit) are never set on ``self`` and
    only the overridden ``handle_async_request`` is safe to call.
    """

    def __init__(self, key: str | None, limit: int | None):
        # Delegate actual sending (and rate limiting) to the composed transport.
        self.transport = AsyncRateLimit(key, limit)

    async def handle_async_request(self, request: Request):
        """Return a cached response when present; otherwise fetch and cache."""
        cache_key = generate_cache_key(request)
        cached_response = get_cached_response(cache_key)
        if cached_response:
            return cached_response
        response = await self.transport.handle_async_request(request)
        cache_response(cache_key, request, response)
        return response

View File

@ -0,0 +1,45 @@
from asyncio import sleep as async_sleep
from httpx import Request
from clients.base.responses import Response
from shared.redis import client
from ._aiohttp import AiohttpTransport
class AsyncRateLimit(AiohttpTransport):
    """Aiohttp-backed transport with an optional Redis GCRA-style rate limit.

    When both ``key`` and ``limit`` are given, at most ``limit`` requests
    per minute are admitted under that Redis key; excess requests poll
    every 0.25s until a slot frees up. With neither set, requests pass
    straight through.

    Raises:
        ValueError: when exactly one of ``key``/``limit`` is provided.
    """

    def __init__(self, key: str | None, limit: int | None):
        # Compose a plain transport for the actual I/O; this class only gates it.
        self.transport = AiohttpTransport()
        self.key = key
        self.limit = limit
        # key and limit are only meaningful together.
        if (self.key and not self.limit) or (not self.key and self.limit):
            msg = 'Incorrectly configured key or limit for rate limiting'
            raise ValueError(msg)

    async def request_is_limited(self) -> bool:
        """GCRA check: advance the theoretical arrival time stored in Redis.

        Returns True when admitting the request now would exceed
        ``limit`` requests per minute.
        NOTE(review): the get/setnx/set sequence is not atomic — two
        concurrent callers can both be admitted; confirm the race is
        acceptable or move this into a Lua script.
        """
        if self.key and self.limit:
            t: int = int(client.time()[0])  # type: ignore  # Redis server clock, seconds
            separation = round(60 / self.limit)  # minimum seconds between requests
            value = client.get(self.key) or t
            client.setnx(self.key, value)
            tat = max(int(value), t)  # theoretical arrival time
            if tat - t <= 60 - separation:
                new_tat = max(tat, t) + separation
                client.set(self.key, new_tat)
                return False
            return True
        return False

    async def handle_async_request(self, request: Request) -> Response:
        """Send the request, polling while the rate limit is saturated.

        Uses a loop rather than the previous tail-recursion: sustained
        throttling grew the call stack by one frame per 0.25s and could
        end in RecursionError.
        """
        while await self.request_is_limited():
            await async_sleep(0.25)
        return await self.transport.handle_async_request(request)

View File

170
src/clients/vk/api.py Normal file
View File

@ -0,0 +1,170 @@
from typing import Literal
from pydantic.types import PositiveInt
from clients.base.clients import AsyncClient
from . import schema as s
class VKAPIClient(AsyncClient):
    """Async client for the VK REST API (``https://api.vk.com/method``).

    Method names mirror VK's ``section.method`` naming, hence the
    non-PEP8 camelCase segments. Every call sends the access token and
    API version as form fields, per VK's conventions; VK reports errors
    as ``{'error': ...}`` payloads with HTTP 200.
    """

    def __init__(self):
        self.base_uri = 'https://api.vk.com/method'
        # Pinned VK API version; responses are versioned, bump deliberately.
        self.version = '5.215'
        super().__init__(base_url=self.base_uri)

    async def account_getProfileInfo(self, access_token: str):
        """Fetch the current user's profile info.

        https://dev.vk.com/ru/method/account.getProfileInfo
        """
        req = await self.post(
            '/account.getProfileInfo',
            data={'access_token': access_token, 'v': self.version},
        )
        return s.ProfileInfo.model_validate(req.json()['response'])

    async def photos_getWallUploadServer(
        self, access_token: str, group_id: int | None = None
    ):
        """Get an upload URL for wall photos (optionally for a community).

        https://dev.vk.com/ru/method/photos.getWallUploadServer
        """
        req = await self.post(
            '/photos.getWallUploadServer',
            data={
                'group_id': group_id,
                'access_token': access_token,
                'v': self.version,
            },
        )
        # Log VK's error payload before the KeyError below surfaces it.
        if 'response' not in req.json():
            self.logger.error(req.json())
        return s.UploadServerResponse.model_validate(req.json()['response'])

    async def photos_saveWallPhoto(
        self,
        access_token: str,
        photo: tuple[bytes, str],
        server: str,
        _hash: str,
        user_id: PositiveInt | None = None,
        group_id: PositiveInt | None = None,
        latitude: float | None = None,
        longitude: float | None = None,
        caption: str | None = None,
    ):
        """Save a previously uploaded wall photo.

        ``server``/``_hash`` come from the upload-server response;
        ``_hash`` is renamed to avoid shadowing the builtin.

        https://dev.vk.com/ru/method/photos.saveWallPhoto
        """
        data = {
            'server': server,
            'hash': _hash,
            'user_id': user_id,
            'group_id': group_id,
            'latitude': latitude,
            'longitude': longitude,
            'caption': caption,
            'access_token': access_token,
            'photo': photo,
            'v': self.version,
        }
        req = await self.post(
            '/photos.saveWallPhoto',
            data=data,
        )
        if 'error' in req.json():
            self.logger.error(req.json())
        return s.SaveWallPhotoResponse.model_validate(req.json())

    async def groups_getById(
        self,
        access_token: str,
        group_ids: list[str] | None = None,
        group_id: str | None = None,
        fields: list[Literal['can_post']] | None = None,
    ):
        """Fetch community info by id(s).

        NOTE(review): ``fields`` is sent as a raw list while VK expects
        a comma-separated string (as done for ``group_ids``) — confirm
        the form encoding produces what the API accepts.

        https://dev.vk.com/ru/method/groups.getById
        """
        req = await self.post(
            '/groups.getById',
            data={
                'group_id': group_id,
                'group_ids': ', '.join(group_ids) if group_ids else None,
                'fields': fields,
                'access_token': access_token,
                'v': self.version,
            },
        )
        return [
            s.Group.model_validate(group)
            for group in req.json()['response']['groups']
        ]

    async def wall_post(
        self,
        access_token: str,
        owner_id: int | None = None,
        friends_only: Literal[0, 1] = 0,
        from_group: Literal[0, 1] = 0,
        message: str | None = None,
        attachments: list[str] | None = None,
        services: list[str] | None = None,
        signed: Literal[0, 1] = 0,
        publish_date: str | None = None,
        lat: int | None = None,
        long: int | None = None,
        place_id: int | None = None,
        post_id: int | None = None,
        guid: str | None = None,
        mark_as_ads: Literal[0, 1] = 0,
        link_title: str | None = None,
        link_photo_id: str | None = None,
        close_comments: Literal[0, 1] = 0,
        donut_paid_duration: int | None = None,
        mute_notifications: Literal[0, 1] | None = None,
        _copyright: str | None = None,
    ):
        """Publish a post on a user or community wall.

        ``_copyright`` maps to VK's ``copyright`` field (renamed to avoid
        shadowing the builtin). Flag parameters use VK's 0/1 convention.

        https://dev.vk.com/ru/method/wall.post
        """
        req = await self.post(
            '/wall.post',
            data={
                'owner_id': owner_id,
                'friends_only': friends_only,
                'from_group': from_group,
                'message': message,
                'attachments': ','.join(attachments) if attachments else None,
                'services': ','.join(services) if services else None,
                'signed': signed,
                'publish_date': publish_date,
                'lat': lat,
                'long': long,
                'place_id': place_id,
                'post_id': post_id,
                'guid': guid,
                'mark_as_ads': mark_as_ads,
                'link_title': link_title,
                'link_photo_id': link_photo_id,
                'close_comments': close_comments,
                'donut_paid_duration': donut_paid_duration,
                'mute_notifications': mute_notifications,
                'copyright': _copyright,
                'access_token': access_token,
                'v': self.version,
            },
        )
        if 'response' not in req.json():
            self.logger.error(req.json())
        return s.WallPostResponse.model_validate(req.json()['response'])

77
src/clients/vk/auth.py Normal file
View File

@ -0,0 +1,77 @@
from base64 import urlsafe_b64encode
from hashlib import sha256
from secrets import token_urlsafe
from urllib.parse import urlencode
from apps.vk.constants import SCOPES
from clients.base.clients import AsyncClient
from core.config import settings
from .schema import AuthModel
class VKAuthClient(AsyncClient):
    """Async client for VK ID OAuth (PKCE) flows at ``https://id.vk.com``."""

    def __init__(self):
        self.base_uri = 'https://id.vk.com'
        super().__init__(base_url=self.base_uri)

    def get_login_url(self):
        """Build the VK ID authorize URL for the PKCE code flow.

        NOTE(review): the code_verifier is reused as the OAuth ``state``
        parameter so it can be recovered on the redirect; this exposes
        the verifier in the browser URL, weakening both PKCE and CSRF
        protection — confirm this trade-off is intended.
        """
        code_verifier = token_urlsafe(16)
        # S256 challenge: base64url(sha256(verifier)) without '=' padding.
        code_challenge = (
            urlsafe_b64encode(sha256(code_verifier.encode()).digest())
            .rstrip(b'=')
            .decode()
        )
        scopes_str = ' '.join(SCOPES)
        base_redirect_url = f'{self.base_uri}/authorize?'
        query = {
            'response_type': 'code',
            'client_id': settings.VK_CLIENT_ID,
            'code_challenge': code_challenge,
            'code_challenge_method': 'S256',
            'redirect_uri': settings.VK_REDIRECT_URI,
            'scope': scopes_str,
            'lang_id': 0,
            'scheme': 'dark',
            'state': code_verifier,
        }
        return base_redirect_url + urlencode(query)

    async def get_access_token(
        self, code: str, code_verifier: str, state: str, device_id: str
    ):
        """Exchange an authorization code for tokens (authorization_code grant)."""
        req = await self.post(
            '/oauth2/auth',
            json={
                'grant_type': 'authorization_code',
                'code_verifier': code_verifier,
                'redirect_uri': settings.VK_REDIRECT_URI,
                'code': code,
                'state': state,
                'client_id': settings.VK_CLIENT_ID,
                'device_id': device_id,
            },
        )
        try:
            return AuthModel.model_validate(req.json())
        except Exception:
            # Surface VK's error payload before re-raising the validation error.
            self.logger.error(req.json())
            raise

    async def refresh_access_token(self, refresh_token: str, device_id: str):
        """Swap a refresh token for a new token pair (refresh_token grant)."""
        req = await self.post(
            '/oauth2/auth',
            json={
                'grant_type': 'refresh_token',
                'refresh_token': refresh_token,
                'client_id': settings.VK_CLIENT_ID,
                'device_id': device_id,
            },
        )
        return AuthModel.model_validate(req.json())

81
src/clients/vk/schema.py Normal file
View File

@ -0,0 +1,81 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel
from pydantic.types import PositiveInt
class AuthModel(BaseModel):
    """Token response from the authorization_code grant."""

    refresh_token: str
    access_token: str
    id_token: str
    token_type: Literal['Bearer']
    expires_in: PositiveInt
    user_id: PositiveInt
    scope: str


class RefreshModel(BaseModel):
    """Token response from the refresh_token grant (no id_token)."""

    refresh_token: str
    access_token: str
    token_type: Literal['Bearer']
    expires_in: PositiveInt
    user_id: PositiveInt
    scope: str


class City(BaseModel):
    """City record embedded in a profile."""

    id: int
    title: str


class ProfileInfo(BaseModel):
    """Response payload of ``account.getProfileInfo``."""

    id: int
    home_town: str
    status: str
    photo_200: str
    is_service_account: bool
    bdate: str
    verification_status: str
    promo_verifications: list[str]
    first_name: str
    last_name: str
    bdate_visibility: int
    city: City | None = None
    phone: str
    relation: int
    screen_name: str | None = None
    sex: int


class Group(BaseModel):
    """Community record from ``groups.getById``."""

    id: int
    name: str
    screen_name: str
    is_closed: int
    type: str
    photo_50: str
    photo_100: str
    photo_200: str


class UploadServerResponse(BaseModel):
    """Response payload of ``photos.getWallUploadServer``."""

    upload_url: str
    album_id: int
    user_id: int


class Photo(BaseModel):
    """Saved photo record from ``photos.saveWallPhoto``."""

    id: int
    album_id: int
    owner_id: int
    text: str
    date: datetime


class SaveWallPhotoResponse(BaseModel):
    """Response payload of ``photos.saveWallPhoto``."""

    response: list[Photo]


class WallPostResponse(BaseModel):
    """Response payload of ``wall.post``."""

    post_id: int

0
src/core/__init__.py Normal file
View File

56
src/core/config.py Normal file
View File

@ -0,0 +1,56 @@
from os import environ
from os.path import exists
from pydantic import Field, model_validator
from pydantic_extra_types.semantic_version import SemanticVersion
from pydantic_settings import BaseSettings, SettingsConfigDict
def get_version():
    """Read the app version from the build-time ``.version`` file.

    Falls back to ``0.0.0`` when the file is absent (local development).
    """
    if not exists('.version'):
        return SemanticVersion.parse('0.0.0')
    with open('.version', encoding='utf-8') as version_file:
        raw = version_file.read()
    return SemanticVersion.parse(raw.strip())
class Settings(BaseSettings):
    """Application settings loaded from the environment and ``.env``."""

    model_config = SettingsConfigDict(
        env_file='.env',
        validate_default=False,
        extra='ignore',
    )
    # App info
    APP_NAME: str = 'VK Resender API'
    APP_DESCRIPTION: str = 'API for the VK Resender'
    APP_PORT: int = Field(default=9000)
    # Read from the .version file baked in at build time; 0.0.0 in dev.
    VERSION: SemanticVersion = Field(default_factory=get_version)
    DEBUG: bool = Field(default=False)
    # Security
    SECRET_KEY: str = Field(default='secret')
    ALGORITHM: str = 'HS256'
    # Database
    DATABASE_URL: str = Field(default='sqlite:///sql.db')
    # Redis
    REDIS_URL: str = Field(default='redis://localhost:6379/0')
    # Loki Logging
    LOKI_URL: str | None = Field(default=None)
    # VK
    VK_CLIENT_ID: str = Field(default='')
    VK_REDIRECT_URI: str = Field(default='')

    @model_validator(mode='after')
    def celery_env(self):
        """Side effect: export the Redis URL for Celery's env-based config."""
        environ['CELERY_BROKER_URL'] = self.REDIS_URL
        environ['CELERY_RESULT_BACKEND'] = self.REDIS_URL
        return self


# Module-level singleton imported throughout the app.
settings = Settings()

54
src/core/exceptions.py Normal file
View File

@ -0,0 +1,54 @@
from logging import getLogger
from fastapi import FastAPI, Request, Response, status
from fastapi.encoders import jsonable_encoder
from fastapi.exceptions import (
RequestValidationError,
WebSocketRequestValidationError,
)
from fastapi.responses import ORJSONResponse
from fastapi.utils import is_body_allowed_for_status_code
from fastapi.websockets import WebSocket
from starlette.exceptions import HTTPException
logger = getLogger(__name__)
logger_format = '%s: %s'
def register_exceptions(app: FastAPI):
    """Install ORJSON-based exception handlers on *app* and return it.

    Mirrors FastAPI's default handlers but renders JSON with orjson and
    logs validation failures.
    """

    @app.exception_handler(HTTPException)
    async def http_exception_handler(request: Request, exc: HTTPException):  # type: ignore
        headers = getattr(exc, 'headers', None)
        # Some status codes (e.g. 204/304) must not carry a body.
        if not is_body_allowed_for_status_code(exc.status_code):
            return Response(status_code=exc.status_code, headers=headers)
        return ORJSONResponse(
            status_code=exc.status_code,
            content={'detail': exc.detail},
            headers=headers,
        )

    @app.exception_handler(RequestValidationError)
    async def validation_exception_handler(  # type: ignore
        request: Request,
        exc: RequestValidationError,
    ):
        logger.warning(logger_format, 'Validation Error', exc.body)
        return ORJSONResponse(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            content=jsonable_encoder({'detail': exc.errors()}),
        )

    @app.exception_handler(WebSocketRequestValidationError)
    async def websocket_validation_exception_handler(  # type: ignore
        websocket: WebSocket,
        exc: WebSocketRequestValidationError,
    ):
        logger.warning(
            logger_format, 'WebSocket Validation Error', exc.errors()
        )
        return await websocket.close(
            code=status.WS_1008_POLICY_VIOLATION,
            reason=jsonable_encoder(exc.errors()),
        )

    return app

67
src/core/log.py Normal file
View File

@ -0,0 +1,67 @@
import logging
from typing import Any
from logging_loki import LokiHandler as Loki # type: ignore
from core.config import settings
class LokiHandler(Loki):
    """Loki logging handler pre-configured with app name/version tags.

    Raises:
        ValueError: when ``settings.LOKI_URL`` is unset.
    """

    def __init__(self):
        if not settings.LOKI_URL:
            msg = 'LOKI_URL is not set'
            raise ValueError(msg)
        super().__init__(  # type: ignore
            settings.LOKI_URL,
            tags={
                'application': settings.APP_NAME,
                'version': str(settings.VERSION),
            },
            version='1',
        )
class Config:
    """Assembles a ``logging.config.dictConfig``-compatible mapping.

    Always logs to stderr; additionally ships records to Loki when
    ``settings.LOKI_URL`` is configured.
    """

    def __init__(self):
        self.version = 1
        self.disable_existing_loggers = False
        self.handlers = self._get_handlers()
        self.loggers = self._get_loggers()

    @staticmethod
    def _get_handlers():
        """Build the handler map: console always, Loki when configured."""
        console: dict[str, Any] = {
            'class': 'logging.StreamHandler',
            'level': logging.INFO,
            'stream': 'ext://sys.stderr',
        }
        handlers: dict[str, Any] = {'console': console}
        if settings.LOKI_URL:
            handlers['loki'] = {'class': LokiHandler}
        return handlers

    def _get_loggers(self):
        """Route the root logger to every configured handler."""
        root = {
            'level': logging.INFO,
            'handlers': list(self.handlers.keys()),
            'propagate': False,
        }
        return {'': root}

    def render(self):
        """Return the final dictConfig mapping."""
        return {
            'version': self.version,
            'disable_existing_loggers': self.disable_existing_loggers,
            'handlers': self.handlers,
            'loggers': self.loggers,
        }


# Rendered once at import; consumed by gunicorn and the celery worker.
config = Config().render()

28
src/core/main.py Normal file
View File

@ -0,0 +1,28 @@
from logging import getLogger
from database import lifespan
from fastapi import FastAPI
from fastapi.responses import ORJSONResponse
from middlewares import register_middlewares
from .config import settings
from .exceptions import register_exceptions
from .routers.v1 import router as v1_router
logger = getLogger(__name__)

# Application entry point served by gunicorn (see gunicorn.conf.py).
app = FastAPI(
    debug=settings.DEBUG,
    title=settings.APP_NAME,
    description=settings.APP_DESCRIPTION,
    version=str(settings.VERSION),
    # Built-in docs are disabled; DEBUG-only docs live under /v1 instead.
    openapi_url=None,
    default_response_class=ORJSONResponse,
    lifespan=lifespan,
    docs_url=None,
    redoc_url=None,
)
app = register_middlewares(app)
app = register_exceptions(app)
app.include_router(v1_router)

View File

@ -0,0 +1,32 @@
from fastapi import APIRouter
from fastapi.openapi.docs import get_swagger_ui_html
from fastapi.openapi.utils import get_openapi
from fastapi.responses import ORJSONResponse
from core.config import settings
def get_openapi_schema(router: APIRouter):
    """Render the OpenAPI schema for *router* as an ORJSONResponse.

    Returns None outside DEBUG so the docs endpoints can 404 in
    production.
    """
    if not settings.DEBUG:
        return None
    return ORJSONResponse(
        get_openapi(
            title=settings.APP_NAME,
            version=str(settings.VERSION),
            description=settings.APP_DESCRIPTION,
            routes=router.routes,
            servers=[],
            # OpenAPI Contact Objects use name/url/email keys; the
            # previous {'Miwory': 'admin@meowly.ru'} form is invalid and
            # dropped by schema validators.
            contact={'name': 'Miwory', 'email': 'admin@meowly.ru'},
        )
    )
def get_swagger_html(router: APIRouter):
    """Return the Swagger UI page for *router*, or None outside DEBUG."""
    if not settings.DEBUG:
        return None
    openapi_url = f'{router.prefix}/openapi.json'
    return get_swagger_ui_html(openapi_url=openapi_url, title='Docs')

24
src/core/routers/v1.py Normal file
View File

@ -0,0 +1,24 @@
from fastapi import APIRouter, HTTPException
from . import get_openapi_schema, get_swagger_html
router = APIRouter(prefix='/v1')

# Rendered once at import; both are None unless settings.DEBUG is on.
openapi_schema = get_openapi_schema(router)
swagger_ui_html = get_swagger_html(router)


@router.get('/openapi.json', include_in_schema=False)
async def openapi():
    """Serve the OpenAPI schema; 404 outside DEBUG."""
    if openapi_schema is None:
        raise HTTPException(status_code=404)
    return openapi_schema


@router.get('/docs', include_in_schema=False)
async def docs():
    """Serve the Swagger UI page; 404 outside DEBUG."""
    if swagger_ui_html is None:
        raise HTTPException(status_code=404)
    return swagger_ui_html

28
src/core/worker.py Normal file
View File

@ -0,0 +1,28 @@
from logging.config import dictConfig
from typing import Any
from celery import Celery
from celery.signals import setup_logging
from .config import settings
from .log import config as log_config
# Celery app used by the worker and beat services in docker-compose.
celery = Celery(__name__)
celery.conf.update(
    timezone='UTC',
    broker_url=settings.REDIS_URL,
    result_backend=settings.REDIS_URL,
    # Keep retrying the broker at startup instead of crashing the worker.
    broker_connection_retry_on_startup=True,
    broker_transport_options={
        # Redeliver tasks not acknowledged within an hour.
        'visibility_timeout': 3600,
        'retry_policy': {'timeout': 5},
    },
    # Preserve our dictConfig-based logging (see setup_loggers below).
    worker_hijack_root_logger=False,
    beat_schedule={},
)


@setup_logging.connect
def setup_loggers(*args: Any, **kwargs: Any):
    """Apply the shared logging config inside Celery worker processes."""
    dictConfig(log_config)

34
src/database/__init__.py Normal file
View File

@ -0,0 +1,34 @@
from contextlib import asynccontextmanager
from logging import getLogger
from typing import Annotated
from alembic.command import upgrade
from alembic.config import Config
from fastapi import Depends, FastAPI
from sqlalchemy.ext.asyncio import AsyncSession
from sqlmodel import Session
from core.config import settings
from database.manager import DBManager
logger = getLogger(__name__)

# Single engine manager shared by all request-scoped sessions.
db_manager = DBManager(settings.DATABASE_URL)

# FastAPI dependency aliases for request-scoped DB sessions.
SyncSessionDep = Annotated[Session, Depends(db_manager.sync_session)]
AsyncSessionDep = Annotated[
    AsyncSession,
    Depends(db_manager.async_session),
]


@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan: log startup info and apply migrations before serving.

    NOTE(review): with multiple gunicorn workers each process runs
    ``alembic upgrade`` (boot.sh also runs it) — confirm concurrent
    upgrades are safe here.
    """
    log_format = '%s: %s'
    logger.info(log_format, 'App Name', settings.APP_NAME)
    logger.info(log_format, 'App Description', settings.APP_DESCRIPTION)
    logger.info(log_format, 'App Version', settings.VERSION)
    config = Config('alembic.ini')
    upgrade(config, 'head')
    yield

41
src/database/manager.py Normal file
View File

@ -0,0 +1,41 @@
from contextlib import asynccontextmanager, contextmanager
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlmodel import Session, create_engine
class DBManager:
    """Owns the sync (psycopg) and async (asyncpg) SQLAlchemy engines.

    NOTE(review): ``_initialize_urls`` rewrites whatever scheme it is
    given into postgresql+psycopg / postgresql+asyncpg — a non-Postgres
    DSN (e.g. the ``sqlite:///`` default in Settings) would be silently
    mangled; confirm only Postgres URLs ever reach this class.
    """

    sync_url = ''
    async_url = ''

    def __init__(self, database_url: str):
        self.sync_url, self.async_url = self._initialize_urls(database_url)
        self.sync_engine = create_engine(self.sync_url)
        self.async_engine = create_async_engine(self.async_url)

    def _initialize_urls(self, database_url: str):
        """Derive driver-qualified sync/async URLs from the configured DSN."""
        url_parts = database_url.split('://')
        return (
            f'postgresql+psycopg://{url_parts[1]}',
            f'postgresql+asyncpg://{url_parts[1]}',
        )

    def sync_session(self):
        """FastAPI dependency: yield a sync Session per request."""
        with Session(self.sync_engine) as session:
            yield session

    async def async_session(self):
        """FastAPI dependency: yield an AsyncSession per request."""
        async with AsyncSession(self.async_engine) as session:
            yield session

    @contextmanager
    def sync_context_session(self):
        """``with``-style sync session for non-FastAPI code paths."""
        with Session(self.sync_engine) as session:
            yield session

    @asynccontextmanager
    async def async_context_session(self):
        """``async with``-style session for non-FastAPI code paths."""
        async with AsyncSession(self.async_engine) as session:
            yield session

16
src/gunicorn.conf.py Normal file
View File

@ -0,0 +1,16 @@
import multiprocessing
from core.config import settings
from core.log import config as log_config
# Gunicorn configuration — gunicorn reads these module attributes at startup.
wsgi_app = 'core.main:app'
bind = f'0.0.0.0:{settings.APP_PORT}'
# One worker while debugging; otherwise the classic (2 * cores) + 1 sizing.
workers = 1 if settings.DEBUG else multiprocessing.cpu_count() * 2 + 1
worker_class = 'uvicorn_worker.UvicornWorker'
logconfig_dict = log_config  # reuse the application's logging configuration
threads = 2
# Recycle workers after ~10k requests, jittered so they do not all restart
# at once — limits the impact of slow memory leaks.
max_requests = 10000
max_requests_jitter = 2000
timeout = 30
keepalive = 2
preload_app = True  # import the app once in the master before forking

View File

@ -0,0 +1,20 @@
from logging import getLogger
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from .access_log_middleware import AccessLogMiddleware
logger = getLogger(__name__)
def register_middlewares(app: FastAPI):
    """Install the application's middleware stack on *app* and return it.

    NOTE(review): ``allow_origins=['*']`` permits any origin — confirm this
    wide-open CORS policy is intentional for production.
    """
    cors_options = {
        'allow_origins': ['*'],
        'allow_methods': ['*'],
        'allow_headers': ['*'],
    }
    app.add_middleware(AccessLogMiddleware)
    app.add_middleware(CORSMiddleware, **cors_options)
    return app

View File

@ -0,0 +1,108 @@
from logging import getLogger
from re import findall
from time import perf_counter
from starlette.types import ASGIApp, Message, Receive, Scope, Send
from core.config import settings
LOCALHOST = '127.0.0.1'
# User-Agent token -> display name. Insertion order matters: detection
# returns the first token found, so specific tokens ('yabrowser', 'edg')
# must precede the generic ones they also contain or co-occur with
# ('chrome', 'safari').
BROWSERS = {
    'firefox': 'Firefox',
    'yabrowser': 'Yandex',
    'samsungbrowser': 'Samsung Internet',
    'trident': 'Internet Explorer',
    'opera': 'Opera',
    'vivaldi': 'Vivaldi',
    'brave': 'Brave',
    'edg': 'Edge',
    'chrome': 'Chrome',
    'safari': 'Safari',
    'chromium': 'Chromium',
    'msie': 'Internet Explorer',
}
class AccessLogMiddleware:
    """Pure-ASGI middleware that logs one line per HTTP request/response.

    Also appends two response headers: ``Server-Timing`` with the measured
    response duration and ``Version`` with the application version.
    Non-HTTP scopes (websocket, lifespan) pass through untouched.
    """

    def __init__(self, app: ASGIApp):
        self.app = app
        self.logger = getLogger(__name__)
        # Pre-encode once: the version header is identical for every response.
        self.version = (
            b'Version',
            f'{settings.VERSION}'.encode(),
        )

    async def detect_browser(self, headers: dict[bytes, bytes]) -> str:
        """Best-effort browser family name from the User-Agent header."""
        if b'user-agent' not in headers:
            return 'unknown'
        user_agent = headers[b'user-agent'].decode().lower()
        # Plain substring containment; the original used re.findall, but the
        # BROWSERS tokens are literal strings, so the regex machinery added
        # cost without changing the result. First match wins (dict order).
        for token, browser in BROWSERS.items():
            if token in user_agent:
                return browser
        return 'unknown'

    @staticmethod
    async def get_client_ip(
        headers: dict[bytes, bytes],
        default_ip: str = LOCALHOST,
    ) -> str:
        """Client IP from ``X-Forwarded-For``, else *default_ip*.

        With multiple forwarded entries the LAST one is used — the address
        appended by the nearest proxy, which is the hardest to spoof.
        """
        if b'x-forwarded-for' not in headers:
            return default_ip
        ips = headers[b'x-forwarded-for'].decode().split(',')
        if len(ips) > 1:
            return ips[-1].strip()
        # Fix: strip the single-entry case too ('X-Forwarded-For: 1.2.3.4 ').
        return ips[0].strip()

    async def __call__(self, scope: Scope, receive: Receive, send: Send):
        if scope['type'] != 'http':
            return await self.app(scope, receive, send)
        start_time = perf_counter()

        async def send_wrapper(message: Message) -> None:
            # Only the response-start message carries status and headers.
            if message['type'] != 'http.response.start':
                return await send(message)
            headers = dict(scope.get('headers', []))
            # Fix: scope['client'] may be None (e.g. some test clients);
            # the original scope['client'][0] raised TypeError in that case.
            client = scope.get('client')
            default_ip = client[0] if client else LOCALHOST
            client_ip = await self.get_client_ip(headers, default_ip)
            browser = await self.detect_browser(headers)
            response_time = (perf_counter() - start_time) * 1000
            response_data = f'dur={response_time:.2f}'
            response = (
                b'Server-Timing',
                f'resp;{response_data};desc="Response Time"'.encode(),
            )
            message['headers'] = message['headers'] + [response, self.version]
            self.logger.info(
                '%s - %s %s %d [%0.2fms]',
                client_ip,
                scope['method'],
                scope['path'],
                message['status'],
                response_time,
                extra={
                    'tags': {
                        'method': scope['method'],
                        'path': scope['path'],
                        'status': message['status'],
                        'response_time': response_time,
                        'client_ip': client_ip,
                        'browser': browser,
                    },
                },
            )
            await send(message)

        return await self.app(scope, receive, send_wrapper)

1
src/migrations/README Normal file
View File

@ -0,0 +1 @@
Generic single-database configuration.

View File

54
src/migrations/env.py Normal file
View File

@ -0,0 +1,54 @@
from logging.config import dictConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
from sqlmodel import SQLModel
from apps.vk.models import * # noqa: F403
from core.log import config as log_config
from database import db_manager
# Route Alembic's logging through the application's logging configuration.
dictConfig(log_config)
config = context.config  # Alembic Config object (from alembic.ini)
url = db_manager.sync_url  # migrations always run on the synchronous driver
target_metadata = SQLModel.metadata  # populated by the model imports above
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL without a live connection."""
    offline_options = {
        'url': url,
        'target_metadata': target_metadata,
        'literal_binds': True,
        'dialect_opts': {'paramstyle': 'named'},
    }
    context.configure(**offline_options)
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode against a live database connection."""
    engine_options = config.get_section(config.config_ini_section, {})
    engine_options['sqlalchemy.url'] = url
    # NullPool: migrations are one-shot, no reason to keep connections pooled.
    engine = engine_from_config(
        engine_options,
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=True,
        )
        with context.begin_transaction():
            context.run_migrations()
# Alembic executes this module directly; mode depends on CLI flags (--sql).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@ -0,0 +1,32 @@
"""
${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from collections.abc import Sequence
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from alembic import op
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: str | None = ${repr(down_revision)}
branch_labels: str | Sequence[str] | None = ${repr(branch_labels)}
depends_on: str | Sequence[str] | None = ${repr(depends_on)}
# NOTE(review): this guard presumably exists only to mark the ``sqlmodel``
# import as used so linters do not strip it from generated migrations
# (generated columns may reference sqlmodel.sql.sqltypes); the condition is
# never expected to be true. Confirm before removing.
if not sqlmodel.sql:
    msg = 'Something went wrong'
    raise Exception(msg)
def upgrade() -> None:
    """Apply this revision's schema changes."""
    ${upgrades if upgrades else "pass"}
def downgrade() -> None:
    """Revert this revision's schema changes."""
    ${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,70 @@
"""
empty message
Revision ID: 5afca0f35b4c
Revises:
Create Date: 2025-06-08 15:39:46.562682
"""
from collections.abc import Sequence
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '5afca0f35b4c'
down_revision: str | None = None  # first revision in the history
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
# NOTE(review): guard from the migration template — presumably keeps the
# ``sqlmodel`` import "used" so linters do not strip it; the column types
# below need sqlmodel.sql.sqltypes. Confirm before removing.
if not sqlmodel.sql:
    msg = 'Something went wrong'
    raise Exception(msg)
def upgrade() -> None:
    """Create the ``vk_users`` and ``vk_tokens`` tables.

    ``vk_tokens.user_id`` is both the primary key and a foreign key to
    ``vk_users.id``, so each user has at most one token row.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'vk_users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column(
            'first_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            'last_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            'screen_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'vk_tokens',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column(
            'access_token', sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column(
            'refresh_token', sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column(
            'device_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['vk_users.id'],
        ),
        sa.PrimaryKeyConstraint('user_id'),
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the tables created by this revision (dependent table first)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('vk_tokens')
    op.drop_table('vk_users')
    # ### end Alembic commands ###

View File

22
src/server.py Normal file
View File

@ -0,0 +1,22 @@
from uvicorn import Config, Server
from core.config import settings
from core.log import config as log_config
def main():
    """Start the development server: uvicorn driven by project settings.

    Access logging is disabled here because AccessLogMiddleware produces
    the access log lines instead.
    """
    server_config = Config(
        'core.main:app',
        host='0.0.0.0',
        port=settings.APP_PORT,
        log_config=log_config,
        log_level='info',
        reload=settings.DEBUG,
        access_log=False,
    )
    Server(server_config).run()


if __name__ == '__main__':
    main()

0
src/shared/__init__.py Normal file
View File

12
src/shared/redis.py Normal file
View File

@ -0,0 +1,12 @@
from redis import Redis
from core.config import settings
# Module-level Redis/Valkey client shared by the application.
client = Redis.from_url(settings.REDIS_URL)
def clear_cache(key: str):
    """Delete every Redis key matching the *key* glob pattern.

    Collects matches with non-blocking SCAN, then issues one DELETE.
    No-op when nothing matches.
    """
    # The original wrapped a single DELETE in a pipeline while still scanning
    # through `client` — the pipeline added no batching and no atomicity over
    # the scan, so it is dropped.
    keys = list(client.scan_iter(key))
    if keys:
        client.delete(*keys)

1
userlist.txt Normal file
View File

@ -0,0 +1 @@
"postgres" "example"