security: fix fail-open auth, streaming payload limit, and rate limiter memory leak
- agent.py: Fail closed when gateway lookup returns None instead of silently dropping the organization filter (cross-tenant board leak)
- board_webhooks.py: Read the request body via streaming chunks so an oversized payload is rejected before it is fully loaded into memory
- rate_limit.py: Add a periodic sweep of expired keys to prevent unbounded memory growth from inactive clients
- test_rate_limit.py: Add a test for the new sweep behavior

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
committed by
Abhimanyu Saharan
parent
858575cf6c
commit
4960d8561b
@@ -373,10 +373,14 @@ async def list_boards(
|
||||
# Main agents (board_id=None) must be scoped to their organization
|
||||
# via their gateway to prevent cross-tenant board leakage.
|
||||
gateway = await Gateway.objects.by_id(agent_ctx.agent.gateway_id).first(session)
|
||||
if gateway is not None:
|
||||
statement = statement.where(
|
||||
col(Board.organization_id) == gateway.organization_id,
|
||||
if gateway is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="Agent gateway not found; cannot determine organization scope.",
|
||||
)
|
||||
statement = statement.where(
|
||||
col(Board.organization_id) == gateway.organization_id,
|
||||
)
|
||||
statement = statement.order_by(col(Board.created_at).desc())
|
||||
return await paginate(session, statement)
|
||||
|
||||
|
||||
@@ -501,6 +501,8 @@ async def ingest_board_webhook(
|
||||
)
|
||||
|
||||
# Enforce a 1 MB payload size limit to prevent memory exhaustion.
|
||||
# Read the body in chunks via request.stream() so an attacker cannot
|
||||
# cause OOM by sending a huge body with a missing/spoofed Content-Length.
|
||||
max_payload_bytes = 1_048_576
|
||||
content_length = request.headers.get("content-length")
|
||||
if content_length and int(content_length) > max_payload_bytes:
|
||||
@@ -508,12 +510,17 @@ async def ingest_board_webhook(
|
||||
status_code=status.HTTP_413_CONTENT_TOO_LARGE,
|
||||
detail=f"Payload exceeds maximum size of {max_payload_bytes} bytes.",
|
||||
)
|
||||
raw_body = await request.body()
|
||||
if len(raw_body) > max_payload_bytes:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_413_CONTENT_TOO_LARGE,
|
||||
detail=f"Payload exceeds maximum size of {max_payload_bytes} bytes.",
|
||||
)
|
||||
chunks: list[bytes] = []
|
||||
total_size = 0
|
||||
async for chunk in request.stream():
|
||||
total_size += len(chunk)
|
||||
if total_size > max_payload_bytes:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_413_CONTENT_TOO_LARGE,
|
||||
detail=f"Payload exceeds maximum size of {max_payload_bytes} bytes.",
|
||||
)
|
||||
chunks.append(chunk)
|
||||
raw_body = b"".join(chunks)
|
||||
_verify_webhook_signature(webhook, raw_body, request)
|
||||
|
||||
content_type = request.headers.get("content-type")
|
||||
|
||||
Reference in New Issue
Block a user