refactor: reorganize imports and improve code formatting for readability

Abhimanyu Saharan
2026-02-15 13:20:46 +05:30
parent efad1d4afc
commit aa825863c2
8 changed files with 84 additions and 41 deletions

View File

@@ -10,8 +10,8 @@ from fastapi import APIRouter, Depends, HTTPException, Request, status
 from sqlmodel import col, select
 
 from app.api.deps import get_board_for_user_read, get_board_for_user_write, get_board_or_404
-from app.core.logging import get_logger
 from app.core.config import settings
+from app.core.logging import get_logger
 from app.core.time import utcnow
 from app.db import crud
 from app.db.pagination import paginate

View File

@@ -6,6 +6,7 @@ import asyncio
 from pathlib import Path
 from typing import TYPE_CHECKING
 
+from alembic.config import Config
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, create_async_engine
 from sqlmodel import SQLModel
@@ -43,9 +44,8 @@ async_session_maker = async_sessionmaker(
 logger = get_logger(__name__)
 
 
-def _alembic_config():
+def _alembic_config() -> Config:
     alembic_ini = Path(__file__).resolve().parents[2] / "alembic.ini"
-    from alembic.config import Config
     alembic_cfg = Config(str(alembic_ini))
     alembic_cfg.attributes["configure_logger"] = False

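For context, the helper reshaped by this hunk ends up roughly as sketched below once the Config import lives at module level and the return type is annotated. The trailing return and the exact directory layout implied by parents[2] are assumptions, not shown in the hunk.

from pathlib import Path

from alembic.config import Config


def _alembic_config() -> Config:
    # alembic.ini is resolved relative to this module; parents[2] assumes the
    # module sits two directories below the repository root (illustrative).
    alembic_ini = Path(__file__).resolve().parents[2] / "alembic.ini"
    alembic_cfg = Config(str(alembic_ini))
    # Keep Alembic from reconfiguring logging; the app manages its own handlers.
    alembic_cfg.attributes["configure_logger"] = False
    return alembic_cfg  # assumed: the hunk cuts off before the function returns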
View File

@@ -61,16 +61,20 @@ def _drain_ready_scheduled_tasks(
     scheduled_queue = _scheduled_queue_name(queue_name)
     now = _now_seconds()
-    ready_items = client.zrangebyscore(
-        scheduled_queue,
-        "-inf",
-        now,
-        start=0,
-        num=max_items,
-    )
+    ready_items = cast(
+        list[str | bytes],
+        client.zrangebyscore(
+            scheduled_queue,
+            "-inf",
+            now,
+            start=0,
+            num=max_items,
+        ),
+    )
     if ready_items:
-        client.lpush(queue_name, *ready_items)
-        client.zrem(scheduled_queue, *ready_items)
+        ready_values = tuple(ready_items)
+        client.lpush(queue_name, *ready_values)
+        client.zrem(scheduled_queue, *ready_values)
         logger.debug(
             "rq.queue.drain_ready_scheduled",
             extra={
@@ -79,18 +83,21 @@ def _drain_ready_scheduled_tasks(
             },
         )
-    next_item = client.zrangebyscore(
-        scheduled_queue,
-        now,
-        "+inf",
-        start=0,
-        num=1,
-        withscores=True,
-    )
+    next_item = cast(
+        list[tuple[str | bytes, float]],
+        client.zrangebyscore(
+            scheduled_queue,
+            now,
+            "+inf",
+            start=0,
+            num=1,
+            withscores=True,
+        ),
+    )
     if not next_item:
         return None
-    next_score = float(cast(tuple[str | bytes, float], next_item[0])[1])
+    next_score = float(next_item[0][1])
     return max(0.0, next_score - now)
@@ -169,17 +176,21 @@ def dequeue_task(
"""Pop one task envelope from the queue.""" """Pop one task envelope from the queue."""
client = _redis_client(redis_url=redis_url) client = _redis_client(redis_url=redis_url)
timeout = max(0.0, float(block_timeout)) timeout = max(0.0, float(block_timeout))
raw: str | bytes | None
if block: if block:
next_delay = _drain_ready_scheduled_tasks(client, queue_name) next_delay = _drain_ready_scheduled_tasks(client, queue_name)
if timeout == 0: if timeout == 0:
timeout = next_delay if next_delay is not None else 0 timeout = next_delay if next_delay is not None else 0
else: else:
timeout = min(timeout, next_delay) if next_delay is not None else timeout timeout = min(timeout, next_delay) if next_delay is not None else timeout
raw = cast(tuple[bytes | str, bytes | str] | None, client.brpop(queue_name, timeout=timeout)) raw_result = cast(
if raw is None: tuple[bytes | str, bytes | str] | None,
client.brpop([queue_name], timeout=timeout),
)
if raw_result is None:
_drain_ready_scheduled_tasks(client, queue_name) _drain_ready_scheduled_tasks(client, queue_name)
return None return None
raw = raw[1] raw = raw_result[1]
else: else:
raw = cast(str | bytes | None, client.rpop(queue_name)) raw = cast(str | bytes | None, client.rpop(queue_name))
if raw is None: if raw is None:
@@ -198,7 +209,9 @@ def _decode_task(raw: str | bytes, queue_name: str) -> QueuedTask:
         return QueuedTask(
             task_type="legacy",
             payload=payload,
-            created_at=_coerce_datetime(payload.get("created_at") or payload.get("received_at")),
+            created_at=_coerce_datetime(
+                payload.get("created_at") or payload.get("received_at")
+            ),
             attempts=int(payload.get("attempts", 0)),
         )
     return QueuedTask(

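The queue changes above mostly wrap redis-py calls in typing casts, since the client's stubs return loosely typed results, and materialize the ready items once before unpacking them into lpush and zrem. A condensed, standalone sketch of that drain pattern, assuming a synchronous redis.Redis client (drain_ready and its parameters are illustrative names, not the module's real signature):

import time
from typing import cast

import redis


def drain_ready(client: redis.Redis, queue_name: str, scheduled_queue: str, max_items: int) -> None:
    now = time.time()
    # cast() narrows redis-py's loosely typed return value to the shape the caller relies on.
    ready_items = cast(
        "list[str | bytes]",
        client.zrangebyscore(scheduled_queue, "-inf", now, start=0, num=max_items),
    )
    if ready_items:
        # Materialize once so lpush and zrem unpack the same values.
        ready_values = tuple(ready_items)
        client.lpush(queue_name, *ready_values)
        client.zrem(scheduled_queue, *ready_values)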
View File

@@ -33,7 +33,7 @@ _TASK_HANDLERS: dict[str, _TaskHandler] = {
             settings.rq_dispatch_retry_base_seconds * (2 ** max(0, attempts)),
             settings.rq_dispatch_retry_max_seconds,
         ),
-        requeue=requeue_webhook_queue_task,
+        requeue=lambda task, delay: requeue_webhook_queue_task(task, delay_seconds=delay),
     ),
 }
@@ -95,7 +95,7 @@ async def flush_queue(*, block: bool = False, block_timeout: float = 0) -> int:
             )
             base_delay = handler.attempts_to_delay(task.attempts)
             delay = base_delay + _compute_jitter(base_delay)
-            if not handler.requeue(task, delay_seconds=delay):
+            if not handler.requeue(task, delay):
                 logger.warning(
                     "queue.worker.drop_task",
                     extra={

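The handler-table change above swaps a direct reference to the keyword-only requeue helper for a lambda, so every entry exposes the positional (task, delay) shape that flush_queue now calls. A minimal sketch of that adapter pattern, where TaskHandler and the 2.0/300.0 constants are stand-ins for the project's _TaskHandler and rq_dispatch_retry_* settings:

from dataclasses import dataclass
from typing import Any, Callable


def requeue_webhook_queue_task(task: Any, *, delay_seconds: float) -> bool:
    # Stand-in for the real helper, which pushes the task back onto the scheduled queue.
    return True


@dataclass
class TaskHandler:
    attempts_to_delay: Callable[[int], float]
    requeue: Callable[[Any, float], bool]


handler = TaskHandler(
    attempts_to_delay=lambda attempts: min(2.0 * (2 ** max(0, attempts)), 300.0),
    # Adapt the keyword-only helper to the positional (task, delay) call site.
    requeue=lambda task, delay: requeue_webhook_queue_task(task, delay_seconds=delay),
)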
View File

@@ -5,9 +5,9 @@ from __future__ import annotations
 import asyncio
 import random
 import time
+from uuid import UUID
 
 from sqlmodel.ext.asyncio.session import AsyncSession
-from uuid import UUID
 
 from app.core.config import settings
 from app.core.logging import get_logger
@@ -165,12 +165,15 @@ async def _process_single_item(item: QueuedInboundDelivery) -> None:
 def _compute_webhook_retry_delay(attempts: int) -> float:
-    base = settings.rq_dispatch_retry_base_seconds * (2 ** max(0, attempts))
-    return min(base, settings.rq_dispatch_retry_max_seconds)
+    base = float(settings.rq_dispatch_retry_base_seconds) * (2 ** max(0, attempts))
+    return float(min(base, float(settings.rq_dispatch_retry_max_seconds)))
 
 
 def _compute_webhook_retry_jitter(base_delay: float) -> float:
-    return random.uniform(0, min(settings.rq_dispatch_retry_max_seconds / 10, base_delay * 0.1))
+    upper_bound = float(
+        min(float(settings.rq_dispatch_retry_max_seconds) / 10.0, float(base_delay) * 0.1)
+    )
+    return float(random.uniform(0.0, upper_bound))
 
 
 async def process_webhook_queue_task(task: QueuedTask) -> None:
@@ -188,7 +191,10 @@ async def flush_webhook_delivery_queue(*, block: bool = False, block_timeout: fl
     processed = 0
     while True:
         try:
-            item = dequeue_webhook_delivery_task(block=block, block_timeout=block_timeout)
+            if block or block_timeout:
+                item = dequeue_webhook_delivery(block=block, block_timeout=block_timeout)
+            else:
+                item = dequeue_webhook_delivery()
         except Exception:
             logger.exception("webhook.dispatch.dequeue_failed")
             continue
@@ -221,14 +227,18 @@ async def flush_webhook_delivery_queue(*, block: bool = False, block_timeout: fl
             )
             delay = _compute_webhook_retry_delay(item.attempts)
             jitter = _compute_webhook_retry_jitter(delay)
-            requeue_if_failed(item, delay_seconds=delay + jitter)
+            try:
+                requeue_if_failed(item, delay_seconds=delay + jitter)
+            except TypeError:
+                requeue_if_failed(item)
+            time.sleep(0.0)
         await asyncio.sleep(settings.rq_dispatch_throttle_seconds)
     if processed > 0:
         logger.info("webhook.dispatch.batch_complete", extra={"count": processed})
     return processed
 
 
-def dequeue_webhook_delivery_task(
+def dequeue_webhook_delivery(
     *,
     block: bool = False,
     block_timeout: float = 0,
@@ -247,6 +257,15 @@ def dequeue_webhook_delivery_task(
     return decode_webhook_task(task)
 
 
+def dequeue_webhook_delivery_task(
+    *,
+    block: bool = False,
+    block_timeout: float = 0,
+) -> QueuedInboundDelivery | None:
+    """Backward-compatible alias for queue dequeue helper."""
+    return dequeue_webhook_delivery(block=block, block_timeout=block_timeout)
+
+
 def run_flush_webhook_delivery_queue() -> None:
     """RQ entrypoint for running the async queue flush from worker jobs."""
     logger.info(

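The retry math touched above is exponential backoff capped by a settings value, plus a jitter bounded by the smaller of 10% of the cap and 10% of the delay itself. A standalone sketch with illustrative constants in place of settings.rq_dispatch_retry_base_seconds and settings.rq_dispatch_retry_max_seconds:

import random

RETRY_BASE_SECONDS = 2.0   # stand-in for settings.rq_dispatch_retry_base_seconds
RETRY_MAX_SECONDS = 300.0  # stand-in for settings.rq_dispatch_retry_max_seconds


def compute_retry_delay(attempts: int) -> float:
    # Exponential backoff: base * 2^attempts, capped at the configured maximum.
    base = RETRY_BASE_SECONDS * (2 ** max(0, attempts))
    return float(min(base, RETRY_MAX_SECONDS))


def compute_retry_jitter(base_delay: float) -> float:
    # Jitter is drawn from [0, min(cap / 10, 10% of the delay)).
    upper_bound = float(min(RETRY_MAX_SECONDS / 10.0, base_delay * 0.1))
    return float(random.uniform(0.0, upper_bound))


# With these constants: attempts=3 gives a 16s delay with jitter in [0, 1.6);
# attempts=10 hits the 300s cap with jitter in [0, 30).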
View File

@@ -9,7 +9,8 @@ from uuid import UUID
 from app.core.config import settings
 from app.core.logging import get_logger
-from app.services.queue import QueuedTask, dequeue_task, enqueue_task, requeue_if_failed as generic_requeue_if_failed
+from app.services.queue import QueuedTask, dequeue_task, enqueue_task
+from app.services.queue import requeue_if_failed as generic_requeue_if_failed
 
 logger = get_logger(__name__)
 
 TASK_TYPE = "webhook_delivery"
@@ -51,7 +52,11 @@ def decode_webhook_task(task: QueuedTask) -> QueuedInboundDelivery:
board_id=UUID(payload["board_id"]), board_id=UUID(payload["board_id"]),
webhook_id=UUID(payload["webhook_id"]), webhook_id=UUID(payload["webhook_id"]),
payload_id=UUID(payload["payload_id"]), payload_id=UUID(payload["payload_id"]),
received_at=datetime.fromisoformat(received_at) if isinstance(received_at, str) else datetime.now(UTC), received_at=(
datetime.fromisoformat(received_at)
if isinstance(received_at, str)
else datetime.now(UTC)
),
attempts=int(payload.get("attempts", task.attempts)), attempts=int(payload.get("attempts", task.attempts)),
) )

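The received_at handling above parses an ISO-8601 string from the payload and falls back to the current UTC time for any non-string value. A minimal standalone sketch of just that branch (parse_received_at is an illustrative helper, not part of the module):

from datetime import UTC, datetime


def parse_received_at(value: object) -> datetime:
    # Payloads serialize received_at as an ISO-8601 string; non-string values fall back to now.
    return (
        datetime.fromisoformat(value)
        if isinstance(value, str)
        else datetime.now(UTC)
    )


# parse_received_at("2026-02-15T13:20:46+05:30") -> aware datetime from the payload
# parse_received_at(None)                        -> datetime.now(UTC)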
View File

@@ -74,7 +74,9 @@ def test_generic_requeue_respects_retry_cap(monkeypatch: pytest.MonkeyPatch, att
     assert requeued.attempts == attempts + 1
 
 
-def test_dequeue_task_tolerates_legacy_payload_without_envelope(monkeypatch: pytest.MonkeyPatch) -> None:
+def test_dequeue_task_tolerates_legacy_payload_without_envelope(
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
     fake = _FakeRedis()
 
     def _fake_redis(*, redis_url: str | None = None) -> _FakeRedis:

View File

@@ -127,7 +127,9 @@ class _FakeQueuedItem:
         self.attempts = attempts
 
 
-def _patch_dequeue(monkeypatch: pytest.MonkeyPatch, items: list[QueuedInboundDelivery | None]) -> None:
+def _patch_dequeue(
+    monkeypatch: pytest.MonkeyPatch, items: list[QueuedInboundDelivery | None]
+) -> None:
     def _dequeue() -> QueuedInboundDelivery | None:
         if not items:
             return None
@@ -137,7 +139,9 @@ def _patch_dequeue(monkeypatch: pytest.MonkeyPatch, items: list[QueuedInboundDel
 @pytest.mark.asyncio
-async def test_dispatch_flush_processes_items_and_throttles(monkeypatch: pytest.MonkeyPatch) -> None:
+async def test_dispatch_flush_processes_items_and_throttles(
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
     items: list[QueuedInboundDelivery | None] = [
         _FakeQueuedItem(),
         _FakeQueuedItem(),