first commit
.gitignore  vendored  Normal file
@@ -0,0 +1,16 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info

# Virtual environments
.venv

# Ruff
.ruff_cache

# uv
uv.lock
.pre-commit-config.yaml  Normal file
@@ -0,0 +1,34 @@
repos:
  - repo: https://github.com/crate-ci/typos
    rev: v1.36.3
    hooks:
      - id: typos

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.13.2
    hooks:
      - id: ruff
        args: [ --fix ]
      - id: ruff-format

  - repo: https://github.com/RobertCraigie/pyright-python
    rev: v1.1.405
    hooks:
      - id: pyright

  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v6.0.0
    hooks:
      - id: trailing-whitespace
      - id: check-docstring-first
      - id: check-added-large-files
      - id: check-yaml
      - id: debug-statements
      - id: check-merge-conflict
      - id: double-quote-string-fixer
      - id: end-of-file-fixer

  - repo: meta
    hooks:
      - id: check-hooks-apply
      - id: check-useless-excludes
pyproject.toml  Normal file
@@ -0,0 +1,93 @@
[project]
name = "aiohttpx"
version = "0.1.0"
description = "Custom HTTPX client with aiohttp transport, rate limiter and caching"
readme = "README.md"
authors = [
    { name = "Miwory", email = "miwory.uwu@gmail.com" }
]
requires-python = ">=3.13"
dependencies = [
    "aiohttp==3.13.1",
    "httpx==0.28.1",
    "orjson==3.11.4",
    "redis[hiredis]==7.0.0",
]

[project.scripts]
aiohttpx = "aiohttpx:main"

[project.optional-dependencies]
dev = [
    "ruff==0.14.2",
    "pyright==1.1.406",
    "poethepoet==0.37.0",
    "pre-commit==4.3.0",
    "types-redis==4.6.0.20241004",
]

[tool.poe.tasks]
_git = "git add ."
_lint = "pre-commit run --all-files"

lint = ["_git", "_lint"]
check = "uv pip list --outdated"

[build-system]
requires = ["uv_build>=0.9.2,<0.10.0"]
build-backend = "uv_build"

[tool.pyright]
venvPath = "."
venv = ".venv"
strictListInference = true
strictDictionaryInference = true
strictSetInference = true
deprecateTypingAliases = true
typeCheckingMode = "strict"
pythonPlatform = "All"

[tool.ruff]
target-version = "py313"
line-length = 79
fix = true

[tool.ruff.lint]
preview = true
select = [
    "E",
    "W",
    "F",
    "UP",
    "A",
    "B",
    "C4",
    "SIM",
    "I",
    "S",
    "G",
    "FAST",
    "ASYNC",
    "BLE",
    "INT",
    "ISC",
    "ICN",
    "PYI",
    "INP",
    "RSE",
    "PIE",
    "SLOT",
    "TID",
    "LOG",
    "FBT",
    "DTZ",
    "EM",
    "PERF",
    "RUF",
]
ignore = ["RUF029", "S101", "S104"]

[tool.ruff.format]
quote-style = "single"
indent-style = "space"
docstring-code-format = true
src/aiohttpx/__init__.py  Normal file
@@ -0,0 +1 @@
__version__: str = '0.1.0'
src/aiohttpx/client.py  Normal file
@@ -0,0 +1,62 @@
from collections.abc import Callable, Mapping
from logging import getLogger
from ssl import SSLContext
from typing import Any

from httpx import URL, Limits
from httpx import AsyncClient as AsyncHTTPXClient
from httpx import _config as c  # type: ignore
from httpx import _types as t  # type: ignore

from aiohttpx.transports.cache import AsyncCacheTransport


class AioHTTPXClient(AsyncHTTPXClient):
    def __init__(
        self,
        *,
        auth: t.AuthTypes | None = None,
        params: t.QueryParamTypes | None = None,
        headers: t.HeaderTypes | None = None,
        cookies: t.CookieTypes | None = None,
        verify: SSLContext | str | bool = True,
        cert: t.CertTypes | None = None,
        proxy: t.ProxyTypes | None = None,
        timeout: t.TimeoutTypes = c.DEFAULT_TIMEOUT_CONFIG,
        follow_redirects: bool = False,
        limits: Limits = c.DEFAULT_LIMITS,
        max_redirects: int = c.DEFAULT_MAX_REDIRECTS,
        event_hooks: Mapping[str, list[Callable[..., Any]]] | None = None,
        base_url: URL | str = '',
        trust_env: bool = True,
        default_encoding: str | Callable[[bytes], str] = 'utf-8',
        redis_url: str | None = None,
        key: str | None = None,
        limit: int | None = None,
    ):
        super().__init__(
            auth=auth,
            params=params,
            headers=headers,
            cookies=cookies,
            verify=verify,
            cert=cert,
            http1=True,
            http2=False,
            proxy=proxy,
            mounts=None,
            timeout=timeout,
            follow_redirects=follow_redirects,
            limits=limits,
            max_redirects=max_redirects,
            event_hooks=event_hooks,
            base_url=base_url,
            transport=AsyncCacheTransport(redis_url, key, limit),
            trust_env=trust_env,
            default_encoding=default_encoding,
        )

        self.logger = getLogger(__name__)


__all__ = ['AioHTTPXClient']
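For context, a minimal usage sketch (mine, not part of the commit): the Redis URL, bucket key and target URL are placeholders, and `limit` is interpreted as requests per minute by the rate limiter further down.

import asyncio

from aiohttpx.client import AioHTTPXClient


async def main() -> None:
    # Placeholder Redis instance and endpoint; `key` is an arbitrary
    # rate-limit bucket name, `limit` is requests per minute.
    client = AioHTTPXClient(
        redis_url='redis://localhost:6379/0',
        key='demo-bucket',
        limit=60,
    )

    try:
        response = await client.get('https://example.com')
        print(response.status_code)
    finally:
        await client.aclose()


asyncio.run(main())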
src/aiohttpx/responses/__init__.py  Normal file
@@ -0,0 +1,9 @@
from typing import Any

from httpx import Response as HTTPXResponse
from orjson import loads


class Response(HTTPXResponse):
    def json(self, **kwargs: Any):
        return loads(self.content, **kwargs)
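A quick illustration of the override (mine, not from the commit): the body bytes are decoded with orjson.loads() instead of the standard-library json module.

from aiohttpx.responses import Response

# Build a response directly from bytes and parse it with orjson.
response = Response(200, content=b'{"answer": 42}')
print(response.json())  # {'answer': 42}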
src/aiohttpx/transports/__init__.py  Normal file  (empty)
src/aiohttpx/transports/aio.py  Normal file
@@ -0,0 +1,129 @@
import asyncio
from logging import getLogger
from types import TracebackType
from typing import Any

import aiohttp
import httpx

from aiohttpx.responses import Response

EXCEPTIONS = {
    aiohttp.ClientError: httpx.RequestError,
    aiohttp.ClientConnectionError: httpx.NetworkError,
    aiohttp.ClientConnectorError: httpx.ConnectError,
    aiohttp.ClientOSError: httpx.ConnectError,
    aiohttp.ClientConnectionResetError: httpx.ConnectError,
    aiohttp.ClientConnectorDNSError: httpx.ConnectError,
    aiohttp.ClientSSLError: httpx.ProtocolError,
    aiohttp.ClientConnectorCertificateError: httpx.ProtocolError,
    aiohttp.ServerFingerprintMismatch: httpx.ProtocolError,
    aiohttp.ClientProxyConnectionError: httpx.ProxyError,
    aiohttp.ClientResponseError: httpx.HTTPStatusError,
    aiohttp.ContentTypeError: httpx.DecodingError,
    aiohttp.ClientPayloadError: httpx.ReadError,
    aiohttp.ServerDisconnectedError: httpx.ReadError,
    aiohttp.InvalidURL: httpx.InvalidURL,
    aiohttp.TooManyRedirects: httpx.TooManyRedirects,
}


class AiohttpTransport(httpx.AsyncBaseTransport):
    def __init__(
        self,
        session: aiohttp.ClientSession | None = None,
    ):
        self.logger = getLogger(__name__)
        self._session = session or aiohttp.ClientSession()
        self._closed = False

    def map_aiohttp_exception(self, exc: Exception):
        for aiohttp_exc, httpx_exc in EXCEPTIONS.items():
            if isinstance(exc, aiohttp_exc):
                return httpx_exc(message=str(exc))  # type: ignore

        if isinstance(exc, asyncio.TimeoutError):
            return httpx.TimeoutException(str(exc))

        return httpx.HTTPError(f'Unknown error: {exc!s}')

    async def __aenter__(self):
        await self._session.__aenter__()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ):
        await self._session.__aexit__(exc_type, exc_value, traceback)
        self._closed = True

    async def aclose(self):
        if not self._closed:
            self._closed = True
            await self._session.close()

    async def handle_async_request(self, request: httpx.Request):
        headers = dict(request.headers)
        method = request.method
        url = str(request.url)
        content = request.content

        for i in range(5):
            try:
                return await self.make_request(method, url, headers, content)

            except aiohttp.ClientError as e:
                exception = self.map_aiohttp_exception(e)

                if not isinstance(
                    exception,
                    (
                        httpx.NetworkError,
                        httpx.ConnectError,
                        httpx.ConnectTimeout,
                    ),
                ):
                    self.logger.error(
                        exception, extra={'reason': 'Request error'}
                    )
                    raise exception from e

                if i == 4:
                    self.logger.error(
                        exception, extra={'reason': 'Too many retries'}
                    )
                    raise exception from e

        msg = 'Unknown error'
        raise RuntimeError(msg)

    async def make_request(
        self, method: str, url: str, headers: dict[str, Any], data: bytes
    ):
        if self._closed:
            msg = 'Cannot make request: Transport session is closed'
            raise RuntimeError(msg)

        async with self._session.request(
            method=method,
            url=url,
            headers=headers,
            data=data,
            allow_redirects=True,
        ) as aiohttp_response:
            content = await aiohttp_response.read()

            httpx_headers = [
                (k.lower(), v)
                for k, v in aiohttp_response.headers.items()
                if k.lower() != 'content-encoding'
            ]

            return Response(
                status_code=aiohttp_response.status,
                headers=httpx_headers,
                content=content,
            )
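The transport can also be plugged into a plain httpx.AsyncClient; a minimal sketch (the URL is a placeholder, network access assumed):

import asyncio

import httpx

from aiohttpx.transports.aio import AiohttpTransport


async def main() -> None:
    # The aiohttp ClientSession is created inside the running event loop.
    async with httpx.AsyncClient(transport=AiohttpTransport()) as client:
        response = await client.get('https://example.com')
        print(response.status_code, response.headers.get('content-type'))


asyncio.run(main())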
src/aiohttpx/transports/cache.py  Normal file
@@ -0,0 +1,93 @@
from httpx import Request
from httpx import Response as HTTPXResponse
from httpx import _models as m  # type: ignore
from orjson import dumps, loads

from aiohttpx.responses import Response
from aiohttpx.transports.rate_limiter import AsyncRateLimit, Redis


def generate_cache_key(request: Request):
    request_data = {
        'method': request.method,
        'url': str(request.url),
    }
    return f'cache:{hash(str(dumps(request_data)))}'


def cache_response(
    client: Redis[bytes],
    cache_key: str,
    request: Request,
    response: Response | HTTPXResponse,
) -> None:
    serialized_response = serialize_response(response)
    ttl = get_ttl_from_headers(request.headers)

    if ttl:
        client.set(cache_key, serialized_response, ex=ttl)


def get_ttl_from_headers(headers: m.Headers):
    if 'X-Cache-TTL' in headers:
        try:
            return int(headers['X-Cache-TTL'])
        except (ValueError, TypeError):
            return None


def get_cached_response(client: Redis[bytes], cache_key: str):
    cached_data = client.get(cache_key)

    if cached_data:
        return deserialize_response(cached_data)

    return None


def serialize_response(response: Response | HTTPXResponse):
    response_data = {
        'status_code': response.status_code,
        'headers': dict(response.headers),
        'content': response.content.decode(),
    }

    return dumps(response_data)


def deserialize_response(serialized_response: bytes):
    response_data = loads(serialized_response)

    return Response(
        status_code=response_data['status_code'],
        headers=response_data['headers'],
        content=response_data['content'].encode(),
    )


class AsyncCacheTransport(AsyncRateLimit):
    def __init__(
        self, redis_url: str | None, key: str | None, limit: int | None
    ):
        if redis_url:
            self.client = Redis.from_url(redis_url)
        else:
            self.client = None

        self.transport = AsyncRateLimit(self.client, key, limit)

    async def handle_async_request(self, request: Request):
        if not self.client:
            return await self.transport.handle_async_request(request)

        cache_key = generate_cache_key(request)
        cached_response = get_cached_response(self.client, cache_key)

        if cached_response:
            return cached_response

        response = await self.transport.handle_async_request(request)

        cache_response(self.client, cache_key, request, response)

        return response
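Caching is opt-in per request via the X-Cache-TTL header; a sketch of the transport used on its own (Redis URL and endpoint are placeholders, a reachable Redis server is assumed), with key and limit left unset so only caching is active:

import asyncio

import httpx

from aiohttpx.transports.cache import AsyncCacheTransport


async def main() -> None:
    transport = AsyncCacheTransport('redis://localhost:6379/0', None, None)

    async with httpx.AsyncClient(transport=transport) as client:
        # The first call hits the network and is stored for 30 seconds;
        # a request to the same method and URL within that window is
        # served from Redis (the cache key covers method and URL only).
        response = await client.get(
            'https://example.com', headers={'X-Cache-TTL': '30'}
        )
        print(response.status_code)


asyncio.run(main())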
src/aiohttpx/transports/rate_limiter.py  Normal file
@@ -0,0 +1,55 @@
from asyncio import sleep as async_sleep

from httpx import Request
from redis import Redis

from aiohttpx.responses import Response
from aiohttpx.transports.aio import AiohttpTransport


class AsyncRateLimit(AiohttpTransport):
    def __init__(
        self, redis: Redis[bytes] | None, key: str | None, limit: int | None
    ):
        self.transport = AiohttpTransport()
        self.client = redis
        self.key = key
        self.limit = limit

        if (
            (key and not redis)
            or (key and not limit)
            or (limit and not redis)
            or (limit and not key)
        ):
            msg = 'Incorrectly configured for rate limiting'
            raise Exception(msg)

    async def aclose(self) -> None:
        # Delegate shutdown to the wrapped transport, which owns the
        # aiohttp session; this class never initialises its own session.
        await self.transport.aclose()

    async def request_is_limited(self):
        if self.client and self.key and self.limit:
            # GCRA-style check: `separation` is the minimum spacing in
            # seconds between requests for `limit` requests per minute.
            t: int = int(self.client.time()[0])  # type: ignore
            separation = round(60 / self.limit)

            value = self.client.get(self.key) or t
            self.client.setnx(self.key, value)

            # `tat` is the theoretical arrival time stored under `key`.
            tat = max(int(value), t)

            if tat - t <= 60 - separation:
                new_tat = max(tat, t) + separation
                self.client.set(self.key, new_tat)
                return False

            return True

        return False

    async def handle_async_request(self, request: Request) -> Response:
        if not self.client:
            return await self.transport.handle_async_request(request)

        if await self.request_is_limited():
            await async_sleep(0.25)
            return await self.handle_async_request(request)

        return await self.transport.handle_async_request(request)
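Worked numbers for the spacing rule above (my reading of the code, not from the commit): with limit = 30 the limiter enforces roughly one request every 2 seconds, allowing requests through as long as the stored arrival time has not drifted near the end of the one-minute window.

# Standalone illustration of the check in request_is_limited():
limit = 30
separation = round(60 / limit)   # 2 seconds between requests

t = 1_000                        # current Redis server time (seconds)
tat = 1_000                      # stored "theoretical arrival time"

allowed = (tat - t) <= 60 - separation
print(separation, allowed)       # 2 True -> request passes, tat advances to 1002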