Compare commits


8 Commits

| SHA1 | Message | Checks | Date |
| --- | --- | --- | --- |
| 8336f5886f | Fix name and CI/CD libraries | Build And Publish Package / publish (push): running | 2025-12-10 13:16:23 +03:00 |
| 4b50bcc051 | Fix CI/CD | Build And Push / publish (push): failed after 16s | 2025-12-10 13:15:04 +03:00 |
| 7779c9d97b | Fixes and package publishing | | 2025-12-10 13:13:48 +03:00 |
| 064c20f0b1 | Fix typing | | 2025-11-26 13:37:30 +03:00 |
| 3eaa68c559 | Fix rate limit | | 2025-11-23 17:07:40 +03:00 |
| e8a0ab0f41 | Update README | | 2025-11-21 22:45:42 +03:00 |
| 4dd1e4b1a6 | Switch the Redis client from synchronous to asynchronous | | 2025-11-06 13:54:22 +03:00 |
| 45af5e247c | Add a function for cleaning request parameters | | 2025-10-28 14:51:14 +03:00 |
7 changed files with 212 additions and 26 deletions


@ -0,0 +1,65 @@
name: Build And Publish Package
run-name: ${{ github.actor }} builds and publishes package to PyPI
on:
  push:
    branches:
      - latest
jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Cache uv binary
        uses: actions/cache@v4
        with:
          path: ${{ github.workspace }}/uv
          key: uv-${{ runner.os }}
          restore-keys: uv-${{ runner.os }}
      - name: Cache uv dependencies
        uses: actions/cache@v4
        with:
          path: ${{ github.workspace }}/.cache/uv
          key: uv-${{ runner.os }}
          restore-keys: uv-${{ runner.os }}
      - name: Cache pre-commit
        uses: actions/cache@v4
        with:
          path: ~/.cache/pre-commit
          key: pre-commit-cache-${{ runner.os }}-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: pre-commit-cache-${{ runner.os }}-
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          version: "0.7.8"
          enable-cache: true
          cache-local-path: ${{ github.workspace }}/.cache/uv
          tool-dir: ${{ github.workspace }}/.cache/uv
          tool-bin-dir: ${{ github.workspace }}/.cache/uv
          cache-dependency-glob: ""
      - name: Set up Python
        run: uv python install
      - name: Install the project
        run: uv sync --all-extras --no-install-project --cache-dir ${{ github.workspace }}/.cache/uv
      - name: Linter & Formatter
        run: uv run pre-commit run --all-files
      - name: Build Package
        run: uv build --cache-dir ${{ github.workspace }}/.cache/uv
      - name: Publish to Gitea PyPI
        run: |
          uv publish \
            --index aiohttpx \
            --username ${{ secrets.CI_USERNAME }} \
            --token ${{ secrets.CI_TOKEN }}

README.md

@ -1,9 +1,117 @@
# aiohttpx
aiohttpx is a HTTP client built on top of the [httpx](https://github.com/encode/httpx) and [aiohttp](https://github.com/aio-libs/aiohttp) libraries.
## Description
**aiohttpx** is an asynchronous HTTP client that merges the ergonomics and powerful API of [httpx](https://github.com/encode/httpx) with the high-performance transport layer of [aiohttp](https://github.com/aio-libs/aiohttp).
It also provides optional Redis-powered caching and rate-limiting to enable efficient, production-grade request handling with minimal setup.
## Features
* Fully asynchronous using aiohttp as the transport
* Supports caching using Redis as the backend
* Supports rate limiting using Redis as the backend
* Fully asynchronous HTTP client using **aiohttp** as the transport.
* Optional **Redis-based caching** to reduce redundant API calls.
* Optional **Redis-based rate limiting** to control request throughput.
* Familiar API interface inspired by **httpx**.
## Requirements
* Python 3.13 or higher
* Redis server (if using caching or rate limiting)
## Installation
### Using `uv` Tool
This project supports dependency management via the [uv tool](https://github.com/astral-sh/uv).
To set up the project:
1. **Install uv**
```bash
curl -LsSf https://astral.sh/uv/install.sh | sh
```
2. **Add the package to your project**
```bash
uv add https://git.meowly.ru/Miwory/aiohttpx.git
```
## Configuration
aiohttpx supports several optional parameters for caching and rate limiting:
### `key` — Redis prefix
A string used as the **Redis key namespace** for all cache and rate-limit entries.
This allows multiple clients or services to share the same Redis instance without collisions.
### `limit` — Rate limit
The maximum number of requests allowed **per second** for this client.
This value is enforced using Redis, making it safe to use across distributed systems.
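For example, two services can safely share one Redis instance by giving each client its own prefix and request budget. A minimal sketch, assuming the constructor arguments shown in the Basic Example below (the URLs, prefixes, and limits here are illustrative):
```python
from aiohttpx.client import AioHTTPXClient

# Both clients point at the same Redis instance; the `key` prefix keeps
# their cache and rate-limit entries separate.
github = AioHTTPXClient(
    base_url='https://api.github.com',
    redis_url='redis://localhost:6379/0',
    key='github',
    limit=5,    # at most 5 requests per second for this client
    logger='GitHub API',
)
twitch = AioHTTPXClient(
    base_url='https://api.twitch.tv/helix',
    redis_url='redis://localhost:6379/0',
    key='twitch',
    limit=10,   # a separate, higher budget for this client
    logger='Twitch API',
)
```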
### `X-Cache-TTL` — Enable caching for a request
To enable caching for a specific request, include the header:
```text
X-Cache-TTL: <seconds>
```
Example:
```python
response = await client.get(
"/users",
headers={"X-Cache-TTL": "60"}, # cache this endpoint for 60 seconds
)
```
If this header is present and Redis is configured, the response will be cached for the specified duration.
## Usage
### Basic Example
```python
from aiohttpx.client import AioHTTPXClient


class TwitchAPIClient(AioHTTPXClient):
    def __init__(
        self,
        redis_url: str,
        client_id: str,
        client_secret: str,
        redirect_uri: str,
    ):
        self.base_uri = 'https://api.twitch.tv/helix'
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri

        super().__init__(
            base_url=self.base_uri,
            headers={'Client-Id': self.client_id},
            redis_url=redis_url,
            key='twitch',  # Redis prefix
            limit=10,  # 10 requests per second
            logger='Twitch API',
        )

    async def test_endpoint(self):
        ...
```
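An endpoint method can then combine the `clean_dict` helper (which drops `None`-valued parameters) with the `X-Cache-TTL` header. A sketch under the same assumptions; the `/streams` route and its query parameters are illustrative, not part of aiohttpx:
```python
    # Inside TwitchAPIClient: a hypothetical endpoint method.
    async def get_streams(self, user_id: str | None = None, game_id: str | None = None):
        # Drop parameters the caller did not set before sending the request.
        params = self.clean_dict({'user_id': user_id, 'game_id': game_id})
        response = await self.get(
            '/streams',
            params=params,
            headers={'X-Cache-TTL': '30'},  # cache this call for 30 seconds
        )
        return response.json()
```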
## Linting and Pre-commit Checks
This project uses `pre-commit` and `ruff` for linting and formatting.
Run the linting process with:
```bash
poe lint
```
## License
This project is licensed under the MIT License.
See `LICENSE` for details.

pyproject.toml

@ -1,6 +1,6 @@
[project]
name = "aiohttpx"
version = "1.0.0"
version = "1.3.1"
description = "Custom HTTPX client with aiohttp transport, rate limiter and caching"
readme = "README.md"
authors = [
@ -8,10 +8,10 @@ authors = [
]
requires-python = ">=3.13"
dependencies = [
"aiohttp==3.13.1",
"httpx==0.28.1",
"orjson==3.11.4",
"redis[hiredis]==7.0.0",
"aiohttp[speedups]>=3.13,<=3.14",
"httpx>=0.28,<=0.29",
"orjson>=3.11,<=3.12",
"redis[hiredis]>=7.0,<=7.1",
]
[project.optional-dependencies]
@ -23,6 +23,12 @@ dev = [
"types-redis==4.6.0.20241004",
]
[[tool.uv.index]]
name = "aiohttpx"
url = "https://git.miwory.dev/api/packages/Miwory/pypi/simple"
publish-url = "https://git.miwory.dev/api/packages/Miwory/pypi/legacy"
explicit = true
[tool.poe.tasks]
_git = "git add ."
_lint = "pre-commit run --all-files"


@ -1 +1 @@
__version__: str = '1.0.0'
__version__: str = '1.1.0'


@ -1,7 +1,7 @@
from collections.abc import Callable, Mapping
from logging import getLogger
from ssl import SSLContext
from typing import Any
from typing import Any, TypeVar

from httpx import URL, Limits
from httpx import AsyncClient as AsyncHTTPXClient
@ -10,6 +10,8 @@ from httpx import _types as t # type: ignore
from aiohttpx.transports.cache import AsyncCacheTransport

K = TypeVar('K')


class AioHTTPXClient(AsyncHTTPXClient):
    def __init__(
@ -59,5 +61,8 @@ class AioHTTPXClient(AsyncHTTPXClient):
        self.logger = getLogger(logger)

    def clean_dict[K, V](self, params: dict[K, Any | None]):
        return {k: v for k, v in params.items() if v is not None}


__all__ = ['AioHTTPXClient']


@ -2,9 +2,10 @@ from httpx import Request
from httpx import Response as HTTPXResponse
from httpx import _models as m # type: ignore
from orjson import dumps, loads
from redis.asyncio import Redis

from aiohttpx.responses import Response
from aiohttpx.transports.rate_limiter import AsyncRateLimit, Redis
from aiohttpx.transports.rate_limiter import AsyncRateLimit


def generate_cache_key(request: Request) -> str:
@ -15,8 +16,8 @@ def generate_cache_key(request: Request) -> str:
    return f'cache:{hash(str(dumps(request_data)))}'


def cache_response(
    client: Redis[bytes],
async def cache_response(
    client: 'Redis[bytes]',
    cache_key: str,
    request: Request,
    response: Response | HTTPXResponse,
@ -25,7 +26,7 @@ def cache_response(
    ttl = get_ttl_from_headers(request.headers)
    if ttl:
        client.set(cache_key, serialized_response, ex=ttl)
        await client.set(cache_key, serialized_response, ex=ttl)


def get_ttl_from_headers(headers: m.Headers) -> int | None:
@ -36,10 +37,10 @@ def get_ttl_from_headers(headers: m.Headers) -> int | None:
    return None


def get_cached_response(
    client: Redis[bytes], cache_key: str
async def get_cached_response(
    client: 'Redis[bytes]', cache_key: str
) -> Response | None:
    cached_data = client.get(cache_key)
    cached_data = await client.get(cache_key)

    if cached_data:
        return deserialize_response(cached_data)
@ -83,13 +84,13 @@ class AsyncCacheTransport(AsyncRateLimit):
            return await self.transport.handle_async_request(request)

        cache_key = generate_cache_key(request)
        cached_response = get_cached_response(self.client, cache_key)
        cached_response = await get_cached_response(self.client, cache_key)

        if cached_response:
            return cached_response

        response = await self.transport.handle_async_request(request)
        cache_response(self.client, cache_key, request, response)
        await cache_response(self.client, cache_key, request, response)

        return response
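Taken together, the transport now awaits every Redis call: it derives a cache key from the request, returns a cached body on a hit, and otherwise forwards the request and stores the response for the TTL taken from `X-Cache-TTL`. A simplified, self-contained restatement of that flow (not the library's actual entry point; the key scheme and the `send` callback are illustrative):
```python
from orjson import dumps
from redis.asyncio import Redis


async def fetch_with_cache(redis: Redis, request, send) -> bytes:
    # `send` stands in for the underlying transport call; `request` is assumed
    # to expose `.url` and `.headers` like an httpx Request.
    ttl = request.headers.get('X-Cache-TTL')
    key = f'cache:{hash(dumps({"url": str(request.url)}))}'  # illustrative key scheme
    if ttl and (cached := await redis.get(key)) is not None:
        return cached                                # cache hit: skip the network
    body = await send(request)                       # cache miss: perform the request
    if ttl:
        await redis.set(key, body, ex=int(ttl))      # store for the requested TTL
    return body
```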


@ -1,7 +1,7 @@
from asyncio import sleep as async_sleep

from httpx import Request
from redis import Redis
from redis.asyncio import Redis

from aiohttpx.responses import Response
from aiohttpx.transports.aio import AiohttpTransport
@ -9,7 +9,7 @@ from aiohttpx.transports.aio import AiohttpTransport

class AsyncRateLimit(AiohttpTransport):
    def __init__(
        self, redis: Redis[bytes] | None, key: str | None, limit: int | None
        self, redis: 'Redis[bytes] | None', key: str | None, limit: int | None
    ) -> None:
        self.transport = AiohttpTransport()
        self.client = redis
@ -27,17 +27,18 @@ class AsyncRateLimit(AiohttpTransport):
    async def request_is_limited(self) -> bool:
        if self.client and self.key and self.limit:
            t: int = int(self.client.time()[0]) # type: ignore
            time = await self.client.time() # type: ignore
            t: int = int(time[0]) # type: ignore
            separation = round(60 / self.limit)
            value = self.client.get(self.key) or t
            self.client.setnx(self.key, value)
            value = await self.client.get(self.key) or t
            await self.client.setnx(self.key, value)
            tat = max(int(value), t)
            if tat - t <= 60 - separation:
                new_tat = max(tat, t) + separation
                self.client.set(self.key, new_tat)
                await self.client.set(self.key, new_tat)
                return False
        return True
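The check above works like a TAT ("theoretical arrival time") scheme: each allowed request pushes the stored timestamp forward by `round(60 / limit)`, and a request is rejected once that timestamp runs more than `60 - separation` ahead of the Redis clock. A standalone illustration of the same arithmetic, using an in-memory dict instead of Redis (the values are illustrative):
```python
def is_limited(store: dict[str, int], key: str, limit: int, now: int) -> bool:
    # Mirrors the logic of request_is_limited, minus Redis and awaits.
    separation = round(60 / limit)            # spacing added per allowed request
    tat = max(int(store.get(key, now)), now)  # stored theoretical arrival time
    if tat - now <= 60 - separation:          # still within the 60-unit budget
        store[key] = max(tat, now) + separation
        return False                          # allowed
    return True                               # limited


store: dict[str, int] = {}
print([is_limited(store, 'twitch', limit=10, now=0) for _ in range(12)])
# With limit=10 (separation=6), the first 10 calls at now=0 are allowed,
# the remaining 2 are limited.
```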