Compare commits

..

12 Commits

Author SHA1 Message Date
8d593fa750 Фикс ЮРЛа для публикации библиотеки
All checks were successful
Build And Publish Package / publish (push) Successful in 38s
2025-12-10 13:26:06 +03:00
b752ccec5a Фикс публицирования библиотеки в CI/CD
Some checks failed
Build And Publish Package / publish (push) Failing after 33s
2025-12-10 13:19:33 +03:00
8336f5886f Фикс названия и библиотек CI/CD
Some checks failed
Build And Publish Package / publish (push) Failing after 47s
2025-12-10 13:16:23 +03:00
4b50bcc051 Фикс CI/CD
Some checks failed
Build And Push / publish (push) Failing after 16s
2025-12-10 13:15:04 +03:00
7779c9d97b Правки и публицирование пакета 2025-12-10 13:13:48 +03:00
064c20f0b1 Фикс типизации 2025-11-26 13:37:30 +03:00
3eaa68c559 Фикс рейт лимита 2025-11-23 17:07:40 +03:00
e8a0ab0f41 Обновлено README 2025-11-21 22:45:42 +03:00
4dd1e4b1a6 Redis клиент сменен с синхронного на ассинхронный 2025-11-06 13:54:22 +03:00
45af5e247c Добавлена функция для чистки параметров запроса 2025-10-28 14:51:14 +03:00
9ab9bcfc96 Форматирование 2025-10-27 22:06:00 +03:00
33f25c799c Версия 1.0.0 2025-10-27 22:05:46 +03:00
8 changed files with 274 additions and 23 deletions

View File

@ -0,0 +1,64 @@
# Build the package and publish it to the Gitea-hosted PyPI registry on
# every push to the `latest` branch.
name: Build And Publish Package
run-name: ${{ github.actor }} builds and publishes package to PyPI
on:
  push:
    branches:
      - latest
jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # full history, so tooling can inspect tags/commits
      # NOTE(review): the two uv caches below previously shared the key
      # `uv-${{ runner.os }}` while caching different paths. actions/cache
      # treats an existing key as "already saved", so the second step never
      # saved, and a restore could hand one path the other's content.
      # Each cache now uses a distinct key prefix.
      - name: Cache uv binary
        uses: actions/cache@v4
        with:
          path: ${{ github.workspace }}/uv
          key: uv-bin-${{ runner.os }}
          restore-keys: uv-bin-${{ runner.os }}
      - name: Cache uv dependencies
        uses: actions/cache@v4
        with:
          path: ${{ github.workspace }}/.cache/uv
          # Key on the lockfile so the dependency cache is invalidated
          # whenever dependencies change.
          key: uv-deps-${{ runner.os }}-${{ hashFiles('uv.lock') }}
          restore-keys: uv-deps-${{ runner.os }}-
      - name: Cache pre-commit
        uses: actions/cache@v4
        with:
          path: ~/.cache/pre-commit
          key: pre-commit-cache-${{ runner.os }}-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: pre-commit-cache-${{ runner.os }}-
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          version: "0.7.8"
          enable-cache: true
          cache-local-path: ${{ github.workspace }}/.cache/uv
          tool-dir: ${{ github.workspace }}/.cache/uv
          tool-bin-dir: ${{ github.workspace }}/.cache/uv
          # Was "" (never invalidate); key the built-in cache on the lockfile.
          cache-dependency-glob: "uv.lock"
      - name: Set up Python
        run: uv python install
      - name: Install the project
        run: uv sync --all-extras --no-install-project --cache-dir ${{ github.workspace }}/.cache/uv
      - name: Linter & Formatter
        run: uv run pre-commit run --all-files
      - name: Build Package
        run: uv build --cache-dir ${{ github.workspace }}/.cache/uv
      - name: Publish to Gitea PyPI
        run: |
          uv publish \
            --index aiohttpx \
            --token ${{ secrets.CI_TOKEN }}

117
README.md
View File

@ -0,0 +1,117 @@
# aiohttpx
## Description
**aiohttpx** is an asynchronous HTTP client that merges the ergonomics and powerful API of [httpx](https://github.com/encode/httpx) with the high-performance transport layer of [aiohttp](https://github.com/aio-libs/aiohttp).
It also provides optional Redis-powered caching and rate-limiting to enable efficient, production-grade request handling with minimal setup.
## Features
* Fully asynchronous HTTP client using **aiohttp** as the transport.
* Optional **Redis-based caching** to reduce redundant API calls.
* Optional **Redis-based rate limiting** to control request throughput.
* Familiar API interface inspired by **httpx**.
## Requirements
* Python 3.13 or higher
* Redis server (if using caching or rate limiting)
## Installation
### Using `uv` Tool
This project supports dependency management via the [uv tool](https://github.com/astral-sh/uv).
To set up the project:
1. **Install uv**
```bash
curl -LsSf https://astral.sh/uv/install.sh | sh
```
2. **Add the package to your project**
```bash
uv add git+https://git.meowly.ru/Miwory/aiohttpx.git
```
## Configuration
aiohttpx supports several optional parameters for caching and rate limiting:
### `key` — Redis prefix
A string used as the **Redis key namespace** for all cache and rate-limit entries.
This allows multiple clients or services to share the same Redis instance without collisions.
### `limit` — Rate limit
The maximum number of requests allowed **per minute** for this client (the limiter spaces requests evenly across a 60-second window).
This value is enforced using Redis, making it safe to use across distributed systems.
### `X-Cache-TTL` — Enable caching for a request
To enable caching for a specific request, include the header:
```text
X-Cache-TTL: <seconds>
```
Example:
```python
response = await client.get(
"/users",
headers={"X-Cache-TTL": "60"}, # cache this endpoint for 60 seconds
)
```
If this header is present and Redis is configured, the response will be cached for the specified duration.
## Usage
### Basic Example
```python
from aiohttpx.client import AioHTTPXClient
class TwitchAPIClient(AioHTTPXClient):
def __init__(
self,
redis_url: str,
client_id: str,
client_secret: str,
redirect_uri: str,
):
self.base_uri = 'https://api.twitch.tv/helix'
self.client_id = client_id
self.client_secret = client_secret
self.redirect_uri = redirect_uri
super().__init__(
base_url=self.base_uri,
headers={'Client-Id': self.client_id},
redis_url=redis_url,
key='twitch', # Redis prefix
limit=10, # 10 requests per minute
logger='Twitch API',
)
async def test_endpoint(self):
...
```
## Linting and Pre-commit Checks
This project uses `pre-commit` and `ruff` for linting and formatting.
Run the linting process with:
```bash
poe lint
```
## License
This project is licensed under the MIT License.
See `LICENSE` for details.

View File

@ -1,6 +1,6 @@
[project] [project]
name = "aiohttpx" name = "aiohttpx"
version = "0.3.0" version = "1.3.1"
description = "Custom HTTPX client with aiohttp transport, rate limiter and caching" description = "Custom HTTPX client with aiohttp transport, rate limiter and caching"
readme = "README.md" readme = "README.md"
authors = [ authors = [
@ -8,10 +8,10 @@ authors = [
] ]
requires-python = ">=3.13" requires-python = ">=3.13"
dependencies = [ dependencies = [
"aiohttp==3.13.1", "aiohttp[speedups]>=3.13,<=3.14",
"httpx==0.28.1", "httpx>=0.28,<=0.29",
"orjson==3.11.4", "orjson>=3.11,<=3.12",
"redis[hiredis]==7.0.0", "redis[hiredis]>=7.0,<=7.1",
] ]
[project.optional-dependencies] [project.optional-dependencies]
@ -23,6 +23,12 @@ dev = [
"types-redis==4.6.0.20241004", "types-redis==4.6.0.20241004",
] ]
[[tool.uv.index]]
name = "aiohttpx"
url = "https://git.miwory.dev/api/packages/Miwory/pypi/simple"
publish-url = "https://git.miwory.dev/api/packages/Miwory/pypi"
explicit = true
[tool.poe.tasks] [tool.poe.tasks]
_git = "git add ." _git = "git add ."
_lint = "pre-commit run --all-files" _lint = "pre-commit run --all-files"

View File

@ -1 +1 @@
__version__: str = '0.3.0' __version__: str = '1.3.1'

View File

@ -1,7 +1,7 @@
from collections.abc import Callable, Mapping from collections.abc import Callable, Mapping
from logging import getLogger from logging import getLogger
from ssl import SSLContext from ssl import SSLContext
from typing import Any from typing import Any, TypeVar
from httpx import URL, Limits from httpx import URL, Limits
from httpx import AsyncClient as AsyncHTTPXClient from httpx import AsyncClient as AsyncHTTPXClient
@ -10,6 +10,8 @@ from httpx import _types as t # type: ignore
from aiohttpx.transports.cache import AsyncCacheTransport from aiohttpx.transports.cache import AsyncCacheTransport
K = TypeVar('K')
class AioHTTPXClient(AsyncHTTPXClient): class AioHTTPXClient(AsyncHTTPXClient):
def __init__( def __init__(
@ -33,6 +35,7 @@ class AioHTTPXClient(AsyncHTTPXClient):
redis_url: str | None = None, redis_url: str | None = None,
key: str | None = None, key: str | None = None,
limit: int | None = None, limit: int | None = None,
logger: str | None = __name__,
) -> None: ) -> None:
super().__init__( super().__init__(
auth=auth, auth=auth,
@ -56,7 +59,10 @@ class AioHTTPXClient(AsyncHTTPXClient):
default_encoding=default_encoding, default_encoding=default_encoding,
) )
self.logger = getLogger(__name__) self.logger = getLogger(logger)
def clean_dict[K, V](self, params: dict[K, Any | None]):
return {k: v for k, v in params.items() if v is not None}
__all__ = ['AioHTTPXClient'] __all__ = ['AioHTTPXClient']

56
src/aiohttpx/status.py Normal file
View File

@ -0,0 +1,56 @@
"""Integer constants for standard HTTP status codes (RFC 9110 / RFC 6585)."""

# 1xx — informational
CONTINUE = 100
SWITCHING_PROTOCOLS = 101

# 2xx — success
OK = 200
CREATED = 201
ACCEPTED = 202
NON_AUTHORITATIVE_INFORMATION = 203
NO_CONTENT = 204
RESET_CONTENT = 205
PARTIAL_CONTENT = 206

# 3xx — redirection
MULTIPLE_CHOICES = 300
MOVED_PERMANENTLY = 301
FOUND = 302
SEE_OTHER = 303
NOT_MODIFIED = 304
USE_PROXY = 305
SWITCH_PROXY = 306  # historical; no longer used, kept for completeness
TEMPORARY_REDIRECT = 307
PERMANENT_REDIRECT = 308

# 4xx — client errors
BAD_REQUEST = 400
UNAUTHORIZED = 401
PAYMENT_REQUIRED = 402
FORBIDDEN = 403
NOT_FOUND = 404
METHOD_NOT_ALLOWED = 405
NOT_ACCEPTABLE = 406
PROXY_AUTHENTICATION_REQUIRED = 407
REQUEST_TIMEOUT = 408
CONFLICT = 409
GONE = 410
LENGTH_REQUIRED = 411
PRECONDITION_FAILED = 412
REQUEST_ENTITY_TOO_LARGE = 413
REQUEST_URI_TOO_LONG = 414
UNSUPPORTED_MEDIA_TYPE = 415
REQUESTED_RANGE_NOT_SATISFIABLE = 416
EXPECTATION_FAILED = 417
IM_A_TEAPOT = 418
MISDIRECTED_REQUEST = 421  # fixed: was 422, which duplicated UNPROCESSABLE_ENTITY
UNPROCESSABLE_ENTITY = 422
LOCKED = 423
FAILED_DEPENDENCY = 424
UPGRADE_REQUIRED = 426
PRECONDITION_REQUIRED = 428
TOO_MANY_REQUESTS = 429
REQUEST_HEADER_FIELDS_TOO_LARGE = 431

# 5xx — server errors
INTERNAL_SERVER_ERROR = 500
NOT_IMPLEMENTED = 501
BAD_GATEWAY = 502
SERVICE_UNAVAILABLE = 503
GATEWAY_TIMEOUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
VARIANT_ALSO_NEGOTIATES = 506
INSUFFICIENT_STORAGE = 507
LOOP_DETECTED = 508
NOT_EXTENDED = 510
NETWORK_AUTHENTICATION_REQUIRED = 511

View File

@ -2,9 +2,10 @@ from httpx import Request
from httpx import Response as HTTPXResponse from httpx import Response as HTTPXResponse
from httpx import _models as m # type: ignore from httpx import _models as m # type: ignore
from orjson import dumps, loads from orjson import dumps, loads
from redis.asyncio import Redis
from aiohttpx.responses import Response from aiohttpx.responses import Response
from aiohttpx.transports.rate_limiter import AsyncRateLimit, Redis from aiohttpx.transports.rate_limiter import AsyncRateLimit
def generate_cache_key(request: Request) -> str: def generate_cache_key(request: Request) -> str:
@ -15,8 +16,8 @@ def generate_cache_key(request: Request) -> str:
return f'cache:{hash(str(dumps(request_data)))}' return f'cache:{hash(str(dumps(request_data)))}'
def cache_response( async def cache_response(
client: Redis[bytes], client: 'Redis[bytes]',
cache_key: str, cache_key: str,
request: Request, request: Request,
response: Response | HTTPXResponse, response: Response | HTTPXResponse,
@ -25,7 +26,7 @@ def cache_response(
ttl = get_ttl_from_headers(request.headers) ttl = get_ttl_from_headers(request.headers)
if ttl: if ttl:
client.set(cache_key, serialized_response, ex=ttl) await client.set(cache_key, serialized_response, ex=ttl)
def get_ttl_from_headers(headers: m.Headers) -> int | None: def get_ttl_from_headers(headers: m.Headers) -> int | None:
@ -36,10 +37,10 @@ def get_ttl_from_headers(headers: m.Headers) -> int | None:
return None return None
def get_cached_response( async def get_cached_response(
client: Redis[bytes], cache_key: str client: 'Redis[bytes]', cache_key: str
) -> Response | None: ) -> Response | None:
cached_data = client.get(cache_key) cached_data = await client.get(cache_key)
if cached_data: if cached_data:
return deserialize_response(cached_data) return deserialize_response(cached_data)
@ -83,13 +84,13 @@ class AsyncCacheTransport(AsyncRateLimit):
return await self.transport.handle_async_request(request) return await self.transport.handle_async_request(request)
cache_key = generate_cache_key(request) cache_key = generate_cache_key(request)
cached_response = get_cached_response(self.client, cache_key) cached_response = await get_cached_response(self.client, cache_key)
if cached_response: if cached_response:
return cached_response return cached_response
response = await self.transport.handle_async_request(request) response = await self.transport.handle_async_request(request)
cache_response(self.client, cache_key, request, response) await cache_response(self.client, cache_key, request, response)
return response return response

View File

@ -1,7 +1,7 @@
from asyncio import sleep as async_sleep from asyncio import sleep as async_sleep
from httpx import Request from httpx import Request
from redis import Redis from redis.asyncio import Redis
from aiohttpx.responses import Response from aiohttpx.responses import Response
from aiohttpx.transports.aio import AiohttpTransport from aiohttpx.transports.aio import AiohttpTransport
@ -9,7 +9,7 @@ from aiohttpx.transports.aio import AiohttpTransport
class AsyncRateLimit(AiohttpTransport): class AsyncRateLimit(AiohttpTransport):
def __init__( def __init__(
self, redis: Redis[bytes] | None, key: str | None, limit: int | None self, redis: 'Redis[bytes] | None', key: str | None, limit: int | None
) -> None: ) -> None:
self.transport = AiohttpTransport() self.transport = AiohttpTransport()
self.client = redis self.client = redis
@ -27,17 +27,18 @@ class AsyncRateLimit(AiohttpTransport):
async def request_is_limited(self) -> bool: async def request_is_limited(self) -> bool:
if self.client and self.key and self.limit: if self.client and self.key and self.limit:
t: int = int(self.client.time()[0]) # type: ignore time = await self.client.time() # type: ignore
t: int = int(time[0]) # type: ignore
separation = round(60 / self.limit) separation = round(60 / self.limit)
value = self.client.get(self.key) or t value = await self.client.get(self.key) or t
self.client.setnx(self.key, value) await self.client.setnx(self.key, value)
tat = max(int(value), t) tat = max(int(value), t)
if tat - t <= 60 - separation: if tat - t <= 60 - separation:
new_tat = max(tat, t) + separation new_tat = max(tat, t) + separation
self.client.set(self.key, new_tat) await self.client.set(self.key, new_tat)
return False return False
return True return True