1 Commits

Author SHA1 Message Date
Abhimanyu Saharan
f9697a8ebd fix(frontend): prevent table horizontal scroll on mobile by default 2026-02-12 08:16:23 +00:00
308 changed files with 2711 additions and 8033 deletions

View File

@@ -1,35 +0,0 @@
## Task / context
- Mission Control task: <link or id>
- Why: <what problem this PR solves>
## Scope
- <bullet 1>
- <bullet 2>
### Out of scope
- <explicitly list what is NOT included>
## Evidence / validation
- [ ] `make check` (or explain what you ran instead)
- [ ] E2E (if applicable): <cypress run / screenshots>
- Logs/links:
- <link to CI run>
## Screenshots (UI changes)
| Desktop | Mobile |
| --- | --- |
| <img src="..." width="600" /> | <img src="..." width="300" /> |
## Docs impact
- [ ] No user/operator docs changes required
- [ ] Docs updated: <paths/links>
## Risk / rollout notes
- Risk level: low / medium / high
- Rollback plan (if needed): <steps>
## Checklist
- [ ] Branch created from `origin/master` (no unrelated commits)
- [ ] PR is focused (one theme)
- [ ] No secrets in code/logs/docs
- [ ] If API/behavior changes: docs updated (OpenAPI + `docs/reference/api.md`)

View File

@@ -39,7 +39,6 @@ jobs:
key: uv-${{ runner.os }}-${{ hashFiles('backend/uv.lock') }}
- name: Set up Node
id: setup-node
uses: actions/setup-node@v4
with:
node-version: "22"
@@ -52,16 +51,6 @@ jobs:
- name: Install frontend dependencies
run: make frontend-sync
- name: Cache Next.js build cache
uses: actions/cache@v4
with:
path: |
frontend/.next/cache
key: nextjs-${{ runner.os }}-node-${{ steps.setup-node.outputs.node-version }}-${{ hashFiles('frontend/package-lock.json') }}
restore-keys: |
nextjs-${{ runner.os }}-node-${{ steps.setup-node.outputs.node-version }}-
- name: Run backend checks
env:
# Keep CI builds deterministic.
@@ -87,11 +76,6 @@ jobs:
make frontend-test
make frontend-build
- name: Docs quality gates (lint + relative link check)
run: |
make docs-check
- name: Upload coverage artifacts
if: always()
uses: actions/upload-artifact@v4
@@ -111,7 +95,6 @@ jobs:
uses: actions/checkout@v4
- name: Set up Node
id: setup-node
uses: actions/setup-node@v4
with:
node-version: "22"
@@ -121,16 +104,6 @@ jobs:
- name: Install frontend dependencies
run: make frontend-sync
- name: Cache Next.js build cache
uses: actions/cache@v4
with:
path: |
frontend/.next/cache
key: nextjs-${{ runner.os }}-node-${{ steps.setup-node.outputs.node-version }}-${{ hashFiles('frontend/package-lock.json') }}
restore-keys: |
nextjs-${{ runner.os }}-node-${{ steps.setup-node.outputs.node-version }}-
- name: Start frontend (dev server)
env:
NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}

View File

@@ -1,23 +0,0 @@
# markdownlint-cli2 config
# Keep the ruleset intentionally tiny to avoid noisy churn.
config:
default: false
MD009: true # no trailing spaces
MD010: true # no hard tabs
MD012: true # no multiple consecutive blank lines
MD047: true # single trailing newline
globs:
- "**/*.md"
ignores:
- "**/node_modules/**"
- "**/.next/**"
- "**/dist/**"
- "**/build/**"
- "**/.venv/**"
- "**/__pycache__/**"
- "**/.pytest_cache/**"
- "**/.mypy_cache/**"
- "**/coverage/**"

View File

@@ -25,37 +25,13 @@ When opening an issue, please include:
## Pull requests
### Branching hygiene (required)
Create feature branches from the latest `origin/master` to avoid unrelated commits in PRs:
```bash
git fetch origin
git checkout master
git reset --hard origin/master
git checkout -b <branch-name>
```
If you accidentally based your branch off another feature branch, fix it by cherry-picking the intended commits onto a clean branch and force-pushing the corrected branch (or opening a new PR).
### Expectations
- Open a PR as **Draft** early (especially for multi-step work) so progress is visible and local changes aren't lost.
- Keep PRs **small and focused** when possible.
- Include a clear description of the change and why it's needed.
- Add/adjust tests when behavior changes.
- Update docs when contributor-facing or operator-facing behavior changes.
### Draft PR policy
- Start as **Draft** when:
- the PR is incomplete,
- CI is expected to fail,
- or you need early feedback on direction.
- Mark **Ready for review** when:
- `make check` is green locally (or you've explained why it can't be), and
- the description includes repro/verification notes.
### Local checks
From repo root, the closest “CI parity” command is:

View File

@@ -122,15 +122,3 @@ backend-templates-sync: ## Sync templates to existing gateway agents (usage: mak
.PHONY: check
check: lint typecheck backend-coverage frontend-test build ## Run lint + typecheck + tests + coverage + build
.PHONY: docs-lint
docs-lint: frontend-tooling ## Lint markdown files (tiny ruleset; avoids noisy churn)
$(NODE_WRAP) npx markdownlint-cli2@0.15.0 --config .markdownlint-cli2.yaml "**/*.md"
.PHONY: docs-link-check
docs-link-check: ## Check for broken relative links in markdown docs
python scripts/check_markdown_links.py
.PHONY: docs-check
docs-check: docs-lint docs-link-check ## Run all docs quality gates

View File

@@ -112,15 +112,6 @@ make setup
make check
```
## Contributing / PR workflow
- Contribution guide: [`CONTRIBUTING.md`](./CONTRIBUTING.md)
- Preferred flow: open a **Draft PR early**, then iterate until `make check` is green.
## Releasing
- Maintainer checklist: [Release checklist](./docs/release/README.md)
## License
This project is licensed under the MIT License. See [`LICENSE`](./LICENSE).

View File

@@ -6,7 +6,6 @@ from typing import TYPE_CHECKING, Any
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, status
from sqlalchemy import func
from sqlmodel import SQLModel, col, select
from app.api import agents as agents_api
@@ -20,7 +19,6 @@ from app.db.pagination import paginate
from app.db.session import get_session
from app.models.agents import Agent
from app.models.boards import Board
from app.models.tags import Tag
from app.models.task_dependencies import TaskDependency
from app.models.tasks import Task
from app.schemas.agents import (
@@ -44,13 +42,11 @@ from app.schemas.gateway_coordination import (
GatewayMainAskUserResponse,
)
from app.schemas.pagination import DefaultLimitOffsetPage
from app.schemas.tags import TagRef
from app.schemas.tasks import TaskCommentCreate, TaskCommentRead, TaskCreate, TaskRead, TaskUpdate
from app.services.activity_log import record_activity
from app.services.openclaw.coordination_service import GatewayCoordinationService
from app.services.openclaw.policies import OpenClawAuthorizationPolicy
from app.services.openclaw.provisioning_db import AgentLifecycleService
from app.services.tags import replace_tags, validate_tag_ids
from app.services.task_dependencies import (
blocked_by_dependency_ids,
dependency_status_by_id,
@@ -214,32 +210,6 @@ async def list_tasks(
)
@router.get("/boards/{board_id}/tags", response_model=list[TagRef])
async def list_tags(
board: Board = BOARD_DEP,
session: AsyncSession = SESSION_DEP,
agent_ctx: AgentAuthContext = AGENT_CTX_DEP,
) -> list[TagRef]:
"""List tags available to the board's organization."""
_guard_board_access(agent_ctx, board)
tags = (
await session.exec(
select(Tag)
.where(col(Tag.organization_id) == board.organization_id)
.order_by(func.lower(col(Tag.name)).asc(), col(Tag.created_at).asc()),
)
).all()
return [
TagRef(
id=tag.id,
name=tag.name,
slug=tag.slug,
color=tag.color,
)
for tag in tags
]
@router.post("/boards/{board_id}/tasks", response_model=TaskRead)
async def create_task(
payload: TaskCreate,
@@ -250,9 +220,8 @@ async def create_task(
"""Create a task on the board as the lead agent."""
_guard_board_access(agent_ctx, board)
_require_board_lead(agent_ctx)
data = payload.model_dump(exclude={"depends_on_task_ids", "tag_ids"})
data = payload.model_dump(exclude={"depends_on_task_ids"})
depends_on_task_ids = list(payload.depends_on_task_ids)
tag_ids = list(payload.tag_ids)
task = Task.model_validate(data)
task.board_id = board.id
@@ -265,11 +234,6 @@ async def create_task(
task_id=task.id,
depends_on_task_ids=depends_on_task_ids,
)
normalized_tag_ids = await validate_tag_ids(
session,
organization_id=board.organization_id,
tag_ids=tag_ids,
)
dep_status = await dependency_status_by_id(
session,
board_id=board.id,
@@ -310,11 +274,6 @@ async def create_task(
depends_on_task_id=dep_id,
),
)
await replace_tags(
session,
task_id=task.id,
tag_ids=normalized_tag_ids,
)
await session.commit()
await session.refresh(task)
record_activity(
@@ -336,10 +295,12 @@ async def create_task(
task=task,
agent=assigned_agent,
)
return await tasks_api._task_read_response(
session,
task=task,
board_id=board.id,
return TaskRead.model_validate(task, from_attributes=True).model_copy(
update={
"depends_on_task_ids": normalized_deps,
"blocked_by_task_ids": blocked_by,
"is_blocked": bool(blocked_by),
},
)

View File

@@ -26,15 +26,12 @@ from app.db.pagination import paginate
from app.db.session import async_session_maker, get_session
from app.models.agents import Agent
from app.models.approvals import Approval
from app.models.tasks import Task
from app.schemas.approvals import ApprovalCreate, ApprovalRead, ApprovalStatus, ApprovalUpdate
from app.schemas.pagination import DefaultLimitOffsetPage
from app.services.activity_log import record_activity
from app.services.approval_task_links import (
load_task_ids_by_approval,
lock_tasks_for_approval,
normalize_task_ids,
pending_approval_conflicts_by_task,
replace_approval_task_links,
task_counts_for_board,
)
@@ -97,36 +94,10 @@ async def _approval_task_ids_map(
return mapping
async def _task_titles_by_id(
session: AsyncSession,
*,
task_ids: set[UUID],
) -> dict[UUID, str]:
if not task_ids:
return {}
rows = list(
await session.exec(
select(col(Task.id), col(Task.title)).where(col(Task.id).in_(task_ids)),
),
)
return {task_id: title for task_id, title in rows}
def _approval_to_read(
approval: Approval,
*,
task_ids: list[UUID],
task_titles: list[str],
) -> ApprovalRead:
def _approval_to_read(approval: Approval, *, task_ids: list[UUID]) -> ApprovalRead:
primary_task_id = task_ids[0] if task_ids else None
model = ApprovalRead.model_validate(approval, from_attributes=True)
return model.model_copy(
update={
"task_id": primary_task_id,
"task_ids": task_ids,
"task_titles": task_titles,
},
)
return model.model_copy(update={"task_id": primary_task_id, "task_ids": task_ids})
async def _approval_reads(
@@ -134,17 +105,8 @@ async def _approval_reads(
approvals: Sequence[Approval],
) -> list[ApprovalRead]:
mapping = await _approval_task_ids_map(session, approvals)
title_by_id = await _task_titles_by_id(
session,
task_ids={task_id for task_ids in mapping.values() for task_id in task_ids},
)
return [
_approval_to_read(
approval,
task_ids=(task_ids := mapping.get(approval.id, [])),
task_titles=[title_by_id[task_id] for task_id in task_ids if task_id in title_by_id],
)
for approval in approvals
_approval_to_read(approval, task_ids=mapping.get(approval.id, [])) for approval in approvals
]
@@ -152,44 +114,6 @@ def _serialize_approval(approval: ApprovalRead) -> dict[str, object]:
return approval.model_dump(mode="json")
def _pending_conflict_detail(conflicts: dict[UUID, UUID]) -> dict[str, object]:
ordered = sorted(conflicts.items(), key=lambda item: str(item[0]))
return {
"message": "Each task can have only one pending approval.",
"conflicts": [
{
"task_id": str(task_id),
"approval_id": str(approval_id),
}
for task_id, approval_id in ordered
],
}
async def _ensure_no_pending_approval_conflicts(
session: AsyncSession,
*,
board_id: UUID,
task_ids: Sequence[UUID],
exclude_approval_id: UUID | None = None,
) -> None:
normalized_task_ids = list({*task_ids})
if not normalized_task_ids:
return
await lock_tasks_for_approval(session, task_ids=normalized_task_ids)
conflicts = await pending_approval_conflicts_by_task(
session,
board_id=board_id,
task_ids=normalized_task_ids,
exclude_approval_id=exclude_approval_id,
)
if conflicts:
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail=_pending_conflict_detail(conflicts),
)
def _approval_resolution_message(
*,
board: Board,
@@ -400,12 +324,6 @@ async def create_approval(
payload=payload.payload,
)
task_id = task_ids[0] if task_ids else None
if payload.status == "pending":
await _ensure_no_pending_approval_conflicts(
session,
board_id=board.id,
task_ids=task_ids,
)
approval = Approval(
board_id=board.id,
task_id=task_id,
@@ -425,12 +343,7 @@ async def create_approval(
)
await session.commit()
await session.refresh(approval)
title_by_id = await _task_titles_by_id(session, task_ids=set(task_ids))
return _approval_to_read(
approval,
task_ids=task_ids,
task_titles=[title_by_id[task_id] for task_id in task_ids if task_id in title_by_id],
)
return _approval_to_read(approval, task_ids=task_ids)
@router.patch("/{approval_id}", response_model=ApprovalRead)
@@ -447,21 +360,7 @@ async def update_approval(
updates = payload.model_dump(exclude_unset=True)
prior_status = approval.status
if "status" in updates:
target_status = updates["status"]
if target_status == "pending" and prior_status != "pending":
task_ids_by_approval = await load_task_ids_by_approval(
session, approval_ids=[approval.id]
)
approval_task_ids = task_ids_by_approval.get(approval.id)
if not approval_task_ids and approval.task_id is not None:
approval_task_ids = [approval.task_id]
await _ensure_no_pending_approval_conflicts(
session,
board_id=board.id,
task_ids=approval_task_ids or [],
exclude_approval_id=approval.id,
)
approval.status = target_status
approval.status = updates["status"]
if approval.status != "pending":
approval.resolved_at = utcnow()
session.add(approval)

View File

@@ -168,29 +168,6 @@ async def start_onboarding(
.first(session)
)
if onboarding:
last_user_content: str | None = None
messages = onboarding.messages or []
if messages:
last_message = messages[-1]
if isinstance(last_message, dict):
last_role = last_message.get("role")
content = last_message.get("content")
if last_role == "user" and isinstance(content, str) and content:
last_user_content = content
if last_user_content:
# Retrigger the agent when the session is waiting on a response.
dispatcher = BoardOnboardingMessagingService(session)
await dispatcher.dispatch_answer(
board=board,
onboarding=onboarding,
answer_text=last_user_content,
correlation_id=f"onboarding.resume:{board.id}:{onboarding.id}",
)
onboarding.updated_at = utcnow()
session.add(onboarding)
await session.commit()
await session.refresh(onboarding)
return onboarding
dispatcher = BoardOnboardingMessagingService(session)

View File

@@ -1,20 +1,4 @@
"""Reusable FastAPI dependencies for auth and board/task access.
These dependencies are the main "policy wiring" layer for the API.
They:
- resolve the authenticated actor (admin user vs agent)
- enforce organization/board access rules
- provide common "load or 404" helpers (board/task)
Why this exists:
- Keeping authorization logic centralized makes it easier to reason about (and
audit) permissions as the API surface grows.
- Some routes allow either admin users or agents; others require user auth.
If you're adding a new endpoint, prefer composing from these dependencies instead
of re-implementing permission checks in the router.
"""
"""Reusable FastAPI dependencies for auth and board/task access."""
from __future__ import annotations

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Literal
from uuid import UUID
from fastapi import APIRouter, Depends, Query
@@ -20,10 +21,8 @@ from app.models.activity_events import ActivityEvent
from app.models.agents import Agent
from app.models.tasks import Task
from app.schemas.metrics import (
DashboardBucketKey,
DashboardKpis,
DashboardMetrics,
DashboardRangeKey,
DashboardRangeSeries,
DashboardSeriesPoint,
DashboardSeriesSet,
@@ -35,6 +34,7 @@ from app.services.organizations import OrganizationContext, list_accessible_boar
router = APIRouter(prefix="/metrics", tags=["metrics"])
OFFLINE_AFTER = timedelta(minutes=10)
ERROR_EVENT_PATTERN = "%failed"
_RUNTIME_TYPE_REFERENCES = (UUID, AsyncSession)
RANGE_QUERY = Query(default="24h")
@@ -46,77 +46,46 @@ ORG_MEMBER_DEP = Depends(require_org_member)
class RangeSpec:
"""Resolved time-range specification for metric aggregation."""
key: DashboardRangeKey
key: Literal["24h", "7d"]
start: datetime
end: datetime
bucket: DashboardBucketKey
duration: timedelta
bucket: Literal["hour", "day"]
def _resolve_range(range_key: DashboardRangeKey) -> RangeSpec:
def _resolve_range(range_key: Literal["24h", "7d"]) -> RangeSpec:
now = utcnow()
specs: dict[DashboardRangeKey, tuple[timedelta, DashboardBucketKey]] = {
"24h": (timedelta(hours=24), "hour"),
"3d": (timedelta(days=3), "day"),
"7d": (timedelta(days=7), "day"),
"14d": (timedelta(days=14), "day"),
"1m": (timedelta(days=30), "day"),
"3m": (timedelta(days=90), "week"),
"6m": (timedelta(days=180), "week"),
"1y": (timedelta(days=365), "month"),
}
duration, bucket = specs[range_key]
if range_key == "7d":
return RangeSpec(
key="7d",
start=now - timedelta(days=7),
end=now,
bucket="day",
)
return RangeSpec(
key=range_key,
start=now - duration,
key="24h",
start=now - timedelta(hours=24),
end=now,
bucket=bucket,
duration=duration,
bucket="hour",
)
def _comparison_range(range_spec: RangeSpec) -> RangeSpec:
return RangeSpec(
key=range_spec.key,
start=range_spec.start - range_spec.duration,
end=range_spec.end - range_spec.duration,
bucket=range_spec.bucket,
duration=range_spec.duration,
)
def _comparison_range(range_key: Literal["24h", "7d"]) -> RangeSpec:
return _resolve_range("7d" if range_key == "24h" else "24h")
def _bucket_start(value: datetime, bucket: DashboardBucketKey) -> datetime:
normalized = value.replace(hour=0, minute=0, second=0, microsecond=0)
if bucket == "month":
return normalized.replace(day=1)
if bucket == "week":
return normalized - timedelta(days=normalized.weekday())
def _bucket_start(value: datetime, bucket: Literal["hour", "day"]) -> datetime:
if bucket == "day":
return normalized
return value.replace(hour=0, minute=0, second=0, microsecond=0)
return value.replace(minute=0, second=0, microsecond=0)
def _next_bucket(cursor: datetime, bucket: DashboardBucketKey) -> datetime:
if bucket == "hour":
return cursor + timedelta(hours=1)
if bucket == "day":
return cursor + timedelta(days=1)
if bucket == "week":
return cursor + timedelta(days=7)
next_month = cursor.month + 1
next_year = cursor.year
if next_month > 12:
next_month = 1
next_year += 1
return cursor.replace(year=next_year, month=next_month, day=1)
def _build_buckets(range_spec: RangeSpec) -> list[datetime]:
cursor = _bucket_start(range_spec.start, range_spec.bucket)
step = timedelta(days=1) if range_spec.bucket == "day" else timedelta(hours=1)
buckets: list[datetime] = []
while cursor <= range_spec.end:
buckets.append(cursor)
cursor = _next_bucket(cursor, range_spec.bucket)
cursor += step
return buckets
@@ -148,7 +117,6 @@ def _wip_series_from_mapping(
inbox=values.get("inbox", 0),
in_progress=values.get("in_progress", 0),
review=values.get("review", 0),
done=values.get("done", 0),
),
)
return DashboardWipRangeSeries(
@@ -247,65 +215,50 @@ async def _query_wip(
range_spec: RangeSpec,
board_ids: list[UUID],
) -> DashboardWipRangeSeries:
if not board_ids:
return _wip_series_from_mapping(range_spec, {})
inbox_bucket_col = func.date_trunc(range_spec.bucket, Task.created_at).label("inbox_bucket")
inbox_statement = (
select(inbox_bucket_col, func.count())
.where(col(Task.status) == "inbox")
.where(col(Task.created_at) >= range_spec.start)
.where(col(Task.created_at) <= range_spec.end)
.where(col(Task.board_id).in_(board_ids))
.group_by(inbox_bucket_col)
.order_by(inbox_bucket_col)
)
inbox_results = (await session.exec(inbox_statement)).all()
status_bucket_col = func.date_trunc(range_spec.bucket, Task.updated_at).label("status_bucket")
bucket_col = func.date_trunc(range_spec.bucket, Task.updated_at).label("bucket")
inbox_case = case((col(Task.status) == "inbox", 1), else_=0)
progress_case = case((col(Task.status) == "in_progress", 1), else_=0)
review_case = case((col(Task.status) == "review", 1), else_=0)
done_case = case((col(Task.status) == "done", 1), else_=0)
status_statement = (
statement = (
select(
status_bucket_col,
bucket_col,
func.sum(inbox_case),
func.sum(progress_case),
func.sum(review_case),
func.sum(done_case),
)
.where(col(Task.updated_at) >= range_spec.start)
.where(col(Task.updated_at) <= range_spec.end)
.where(col(Task.board_id).in_(board_ids))
.group_by(status_bucket_col)
.order_by(status_bucket_col)
)
status_results = (await session.exec(status_statement)).all()
if not board_ids:
return _wip_series_from_mapping(range_spec, {})
statement = (
statement.where(col(Task.board_id).in_(board_ids)).group_by(bucket_col).order_by(bucket_col)
)
results = (await session.exec(statement)).all()
mapping: dict[datetime, dict[str, int]] = {}
for bucket, inbox in inbox_results:
values = mapping.setdefault(bucket, {})
values["inbox"] = int(inbox or 0)
for bucket, in_progress, review, done in status_results:
values = mapping.setdefault(bucket, {})
values["in_progress"] = int(in_progress or 0)
values["review"] = int(review or 0)
values["done"] = int(done or 0)
for bucket, inbox, in_progress, review in results:
mapping[bucket] = {
"inbox": int(inbox or 0),
"in_progress": int(in_progress or 0),
"review": int(review or 0),
}
return _wip_series_from_mapping(range_spec, mapping)
async def _median_cycle_time_for_range(
async def _median_cycle_time_7d(
session: AsyncSession,
range_spec: RangeSpec,
board_ids: list[UUID],
) -> float | None:
now = utcnow()
start = now - timedelta(days=7)
in_progress = sql_cast(Task.in_progress_at, DateTime)
duration_hours = func.extract("epoch", Task.updated_at - in_progress) / 3600.0
statement = (
select(func.percentile_cont(0.5).within_group(duration_hours))
.where(col(Task.status) == "review")
.where(col(Task.in_progress_at).is_not(None))
.where(col(Task.updated_at) >= range_spec.start)
.where(col(Task.updated_at) <= range_spec.end)
.where(col(Task.updated_at) >= start)
.where(col(Task.updated_at) <= now)
)
if not board_ids:
return None
@@ -350,15 +303,11 @@ async def _error_rate_kpi(
return (error_count / total_count) * 100 if total_count > 0 else 0.0
async def _active_agents(
session: AsyncSession,
range_spec: RangeSpec,
board_ids: list[UUID],
) -> int:
async def _active_agents(session: AsyncSession, board_ids: list[UUID]) -> int:
threshold = utcnow() - OFFLINE_AFTER
statement = select(func.count()).where(
col(Agent.last_seen_at).is_not(None),
col(Agent.last_seen_at) >= range_spec.start,
col(Agent.last_seen_at) <= range_spec.end,
col(Agent.last_seen_at) >= threshold,
)
if not board_ids:
return 0
@@ -367,18 +316,12 @@ async def _active_agents(
return int(result)
async def _tasks_in_progress(
session: AsyncSession,
range_spec: RangeSpec,
board_ids: list[UUID],
) -> int:
async def _tasks_in_progress(session: AsyncSession, board_ids: list[UUID]) -> int:
if not board_ids:
return 0
statement = (
select(func.count())
.where(col(Task.status) == "in_progress")
.where(col(Task.updated_at) >= range_spec.start)
.where(col(Task.updated_at) <= range_spec.end)
.where(col(Task.board_id).in_(board_ids))
)
result = (await session.exec(statement)).one()
@@ -387,13 +330,13 @@ async def _tasks_in_progress(
@router.get("/dashboard", response_model=DashboardMetrics)
async def dashboard_metrics(
range_key: DashboardRangeKey = RANGE_QUERY,
range_key: Literal["24h", "7d"] = RANGE_QUERY,
session: AsyncSession = SESSION_DEP,
ctx: OrganizationContext = ORG_MEMBER_DEP,
) -> DashboardMetrics:
"""Return dashboard KPIs and time-series data for accessible boards."""
primary = _resolve_range(range_key)
comparison = _comparison_range(primary)
comparison = _comparison_range(range_key)
board_ids = await list_accessible_board_ids(session, member=ctx.member, write=False)
throughput_primary = await _query_throughput(session, primary, board_ids)
@@ -422,14 +365,10 @@ async def dashboard_metrics(
)
kpis = DashboardKpis(
active_agents=await _active_agents(session, primary, board_ids),
tasks_in_progress=await _tasks_in_progress(session, primary, board_ids),
active_agents=await _active_agents(session, board_ids),
tasks_in_progress=await _tasks_in_progress(session, board_ids),
error_rate_pct=await _error_rate_kpi(session, primary, board_ids),
median_cycle_time_hours_7d=await _median_cycle_time_for_range(
session,
primary,
board_ids,
),
median_cycle_time_hours_7d=await _median_cycle_time_7d(session, board_ids),
)
return DashboardMetrics(

View File

@@ -1,220 +0,0 @@
"""Tag CRUD endpoints for organization-scoped task categorization."""
from __future__ import annotations
from typing import TYPE_CHECKING
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy import func
from sqlmodel import col, select
from app.api.deps import require_org_admin, require_org_member
from app.core.time import utcnow
from app.db import crud
from app.db.pagination import paginate
from app.db.session import get_session
from app.models.tag_assignments import TagAssignment
from app.models.tags import Tag
from app.schemas.common import OkResponse
from app.schemas.pagination import DefaultLimitOffsetPage
from app.schemas.tags import TagCreate, TagRead, TagUpdate
from app.services.organizations import OrganizationContext
from app.services.tags import slugify_tag, task_counts_for_tags
if TYPE_CHECKING:
from collections.abc import Sequence
from fastapi_pagination.limit_offset import LimitOffsetPage
from sqlmodel.ext.asyncio.session import AsyncSession
router = APIRouter(prefix="/tags", tags=["tags"])
SESSION_DEP = Depends(get_session)
ORG_MEMBER_DEP = Depends(require_org_member)
ORG_ADMIN_DEP = Depends(require_org_admin)
def _normalize_slug(slug: str | None, *, fallback_name: str) -> str:
source = (slug or "").strip() or fallback_name
return slugify_tag(source)
async def _require_org_tag(
session: AsyncSession,
*,
tag_id: UUID,
ctx: OrganizationContext,
) -> Tag:
tag = await Tag.objects.by_id(tag_id).first(session)
if tag is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
if tag.organization_id != ctx.organization.id:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
return tag
async def _ensure_slug_available(
session: AsyncSession,
*,
organization_id: UUID,
slug: str,
exclude_tag_id: UUID | None = None,
) -> None:
existing = await Tag.objects.filter_by(organization_id=organization_id, slug=slug).first(
session
)
if existing is None:
return
if exclude_tag_id is not None and existing.id == exclude_tag_id:
return
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail="Tag slug already exists in this organization.",
)
async def _tag_read_page(
*,
session: AsyncSession,
items: Sequence[Tag],
) -> list[TagRead]:
if not items:
return []
counts = await task_counts_for_tags(
session,
tag_ids=[item.id for item in items],
)
return [
TagRead.model_validate(item, from_attributes=True).model_copy(
update={"task_count": counts.get(item.id, 0)},
)
for item in items
]
@router.get("", response_model=DefaultLimitOffsetPage[TagRead])
async def list_tags(
session: AsyncSession = SESSION_DEP,
ctx: OrganizationContext = ORG_MEMBER_DEP,
) -> LimitOffsetPage[TagRead]:
"""List tags for the active organization."""
statement = (
select(Tag)
.where(col(Tag.organization_id) == ctx.organization.id)
.order_by(func.lower(col(Tag.name)).asc(), col(Tag.created_at).asc())
)
async def _transform(items: Sequence[object]) -> Sequence[object]:
tags: list[Tag] = []
for item in items:
if not isinstance(item, Tag):
msg = "Expected Tag items from paginated query"
raise TypeError(msg)
tags.append(item)
return await _tag_read_page(session=session, items=tags)
return await paginate(session, statement, transformer=_transform)
@router.post("", response_model=TagRead)
async def create_tag(
payload: TagCreate,
session: AsyncSession = SESSION_DEP,
ctx: OrganizationContext = ORG_ADMIN_DEP,
) -> TagRead:
"""Create a tag within the active organization."""
slug = _normalize_slug(payload.slug, fallback_name=payload.name)
await _ensure_slug_available(
session,
organization_id=ctx.organization.id,
slug=slug,
)
tag = await crud.create(
session,
Tag,
organization_id=ctx.organization.id,
name=payload.name,
slug=slug,
color=payload.color,
description=payload.description,
)
return TagRead.model_validate(tag, from_attributes=True)
@router.get("/{tag_id}", response_model=TagRead)
async def get_tag(
tag_id: UUID,
session: AsyncSession = SESSION_DEP,
ctx: OrganizationContext = ORG_MEMBER_DEP,
) -> TagRead:
"""Get a single tag in the active organization."""
tag = await _require_org_tag(
session,
tag_id=tag_id,
ctx=ctx,
)
count = (
await session.exec(
select(func.count(col(TagAssignment.task_id))).where(
col(TagAssignment.tag_id) == tag.id,
),
)
).one()
return TagRead.model_validate(tag, from_attributes=True).model_copy(
update={"task_count": int(count or 0)},
)
@router.patch("/{tag_id}", response_model=TagRead)
async def update_tag(
tag_id: UUID,
payload: TagUpdate,
session: AsyncSession = SESSION_DEP,
ctx: OrganizationContext = ORG_ADMIN_DEP,
) -> TagRead:
"""Update a tag in the active organization."""
tag = await _require_org_tag(
session,
tag_id=tag_id,
ctx=ctx,
)
updates = payload.model_dump(exclude_unset=True)
if "slug" in payload.model_fields_set:
updates["slug"] = _normalize_slug(
updates.get("slug"),
fallback_name=str(updates.get("name") or tag.name),
)
if "slug" in updates and isinstance(updates["slug"], str):
await _ensure_slug_available(
session,
organization_id=ctx.organization.id,
slug=updates["slug"],
exclude_tag_id=tag.id,
)
updates["updated_at"] = utcnow()
updated = await crud.patch(session, tag, updates)
return TagRead.model_validate(updated, from_attributes=True)
@router.delete("/{tag_id}", response_model=OkResponse)
async def delete_tag(
tag_id: UUID,
session: AsyncSession = SESSION_DEP,
ctx: OrganizationContext = ORG_ADMIN_DEP,
) -> OkResponse:
"""Delete a tag and remove all associated tag links."""
tag = await _require_org_tag(
session,
tag_id=tag_id,
ctx=ctx,
)
await crud.delete_where(
session,
TagAssignment,
col(TagAssignment.tag_id) == tag.id,
commit=False,
)
await session.delete(tag)
await session.commit()
return OkResponse()

View File

@@ -32,11 +32,9 @@ from app.models.agents import Agent
from app.models.approval_task_links import ApprovalTaskLink
from app.models.approvals import Approval
from app.models.boards import Board
from app.models.tag_assignments import TagAssignment
from app.models.task_dependencies import TaskDependency
from app.models.task_fingerprints import TaskFingerprint
from app.models.tasks import Task
from app.schemas.activity_events import ActivityEventRead
from app.schemas.common import OkResponse
from app.schemas.errors import BlockedTaskError
from app.schemas.pagination import DefaultLimitOffsetPage
@@ -48,12 +46,6 @@ from app.services.openclaw.gateway_dispatch import GatewayDispatchService
from app.services.openclaw.gateway_rpc import GatewayConfig as GatewayClientConfig
from app.services.openclaw.gateway_rpc import OpenClawGatewayError
from app.services.organizations import require_board_access
from app.services.tags import (
TagState,
load_tag_state,
replace_tags,
validate_tag_ids,
)
from app.services.task_dependencies import (
blocked_by_dependency_ids,
dependency_ids_by_task_id,
@@ -583,10 +575,6 @@ async def _task_read_page(
return []
task_ids = [task.id for task in tasks]
tag_state_by_task_id = await load_tag_state(
session,
task_ids=task_ids,
)
deps_map = await dependency_ids_by_task_id(
session,
board_id=board_id,
@@ -603,7 +591,6 @@ async def _task_read_page(
output: list[TaskRead] = []
for task in tasks:
tag_state = tag_state_by_task_id.get(task.id, TagState())
dep_list = deps_map.get(task.id, [])
blocked_by = blocked_by_dependency_ids(
dependency_ids=dep_list,
@@ -615,8 +602,6 @@ async def _task_read_page(
TaskRead.model_validate(task, from_attributes=True).model_copy(
update={
"depends_on_task_ids": dep_list,
"tag_ids": tag_state.tag_ids,
"tags": tag_state.tags,
"blocked_by_task_ids": blocked_by,
"is_blocked": bool(blocked_by),
},
@@ -625,22 +610,18 @@ async def _task_read_page(
return output
async def _stream_task_state(
async def _stream_dependency_state(
session: AsyncSession,
*,
board_id: UUID,
rows: list[tuple[ActivityEvent, Task | None]],
) -> tuple[dict[UUID, list[UUID]], dict[UUID, str], dict[UUID, TagState]]:
) -> tuple[dict[UUID, list[UUID]], dict[UUID, str]]:
task_ids = [
task.id for event, task in rows if task is not None and event.event_type != "task.comment"
]
if not task_ids:
return {}, {}, {}
return {}, {}
tag_state_by_task_id = await load_tag_state(
session,
task_ids=list({*task_ids}),
)
deps_map = await dependency_ids_by_task_id(
session,
board_id=board_id,
@@ -650,14 +631,14 @@ async def _stream_task_state(
for value in deps_map.values():
dep_ids.extend(value)
if not dep_ids:
return deps_map, {}, tag_state_by_task_id
return deps_map, {}
dep_status = await dependency_status_by_id(
session,
board_id=board_id,
dependency_ids=list({*dep_ids}),
)
return deps_map, dep_status, tag_state_by_task_id
return deps_map, dep_status
def _task_event_payload(
@@ -666,12 +647,8 @@ def _task_event_payload(
*,
deps_map: dict[UUID, list[UUID]],
dep_status: dict[UUID, str],
tag_state_by_task_id: dict[UUID, TagState],
) -> dict[str, object]:
payload: dict[str, object] = {
"type": event.event_type,
"activity": ActivityEventRead.model_validate(event).model_dump(mode="json"),
}
payload: dict[str, object] = {"type": event.event_type}
if event.event_type == "task.comment":
payload["comment"] = _serialize_comment(event)
return payload
@@ -679,7 +656,6 @@ def _task_event_payload(
payload["task"] = None
return payload
tag_state = tag_state_by_task_id.get(task.id, TagState())
dep_list = deps_map.get(task.id, [])
blocked_by = blocked_by_dependency_ids(
dependency_ids=dep_list,
@@ -692,8 +668,6 @@ def _task_event_payload(
.model_copy(
update={
"depends_on_task_ids": dep_list,
"tag_ids": tag_state.tag_ids,
"tags": tag_state.tags,
"blocked_by_task_ids": blocked_by,
"is_blocked": bool(blocked_by),
},
@@ -719,7 +693,7 @@ async def _task_event_generator(
async with async_session_maker() as session:
rows = await _fetch_task_events(session, board_id, last_seen)
deps_map, dep_status, tag_state_by_task_id = await _stream_task_state(
deps_map, dep_status = await _stream_dependency_state(
session,
board_id=board_id,
rows=rows,
@@ -740,7 +714,6 @@ async def _task_event_generator(
task,
deps_map=deps_map,
dep_status=dep_status,
tag_state_by_task_id=tag_state_by_task_id,
)
yield {"event": "task", "data": json.dumps(payload)}
await asyncio.sleep(2)
@@ -801,9 +774,8 @@ async def create_task(
auth: AuthContext = ADMIN_AUTH_DEP,
) -> TaskRead:
"""Create a task and initialize dependency rows."""
data = payload.model_dump(exclude={"depends_on_task_ids", "tag_ids"})
data = payload.model_dump(exclude={"depends_on_task_ids"})
depends_on_task_ids = list(payload.depends_on_task_ids)
tag_ids = list(payload.tag_ids)
task = Task.model_validate(data)
task.board_id = board.id
@@ -816,11 +788,6 @@ async def create_task(
task_id=task.id,
depends_on_task_ids=depends_on_task_ids,
)
normalized_tag_ids = await validate_tag_ids(
session,
organization_id=board.organization_id,
tag_ids=tag_ids,
)
dep_status = await dependency_status_by_id(
session,
board_id=board.id,
@@ -843,11 +810,6 @@ async def create_task(
depends_on_task_id=dep_id,
),
)
await replace_tags(
session,
task_id=task.id,
tag_ids=normalized_tag_ids,
)
await session.commit()
await session.refresh(task)
@@ -870,10 +832,12 @@ async def create_task(
task=task,
agent=assigned_agent,
)
return await _task_read_response(
session,
task=task,
board_id=board.id,
return TaskRead.model_validate(task, from_attributes=True).model_copy(
update={
"depends_on_task_ids": normalized_deps,
"blocked_by_task_ids": blocked_by,
"is_blocked": bool(blocked_by),
},
)
@@ -908,10 +872,8 @@ async def update_task(
depends_on_task_ids = (
payload.depends_on_task_ids if "depends_on_task_ids" in payload.model_fields_set else None
)
tag_ids = payload.tag_ids if "tag_ids" in payload.model_fields_set else None
updates.pop("comment", None)
updates.pop("depends_on_task_ids", None)
updates.pop("tag_ids", None)
update = _TaskUpdateInput(
task=task,
actor=actor,
@@ -921,7 +883,6 @@ async def update_task(
updates=updates,
comment=comment,
depends_on_task_ids=depends_on_task_ids,
tag_ids=tag_ids,
)
if actor.actor_type == "agent" and actor.agent and actor.agent.is_board_lead:
return await _apply_lead_task_update(session, update=update)
@@ -992,12 +953,6 @@ async def delete_task(
),
commit=False,
)
await crud.delete_where(
session,
TagAssignment,
col(TagAssignment.task_id) == task.id,
commit=False,
)
await session.delete(task)
await session.commit()
return OkResponse()
@@ -1160,8 +1115,6 @@ class _TaskUpdateInput:
updates: dict[str, object]
comment: str | None
depends_on_task_ids: list[UUID] | None
tag_ids: list[UUID] | None
normalized_tag_ids: list[UUID] | None = None
def _required_status_value(value: object) -> str:
@@ -1176,21 +1129,6 @@ def _optional_assigned_agent_id(value: object) -> UUID | None:
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY)
async def _board_organization_id(
session: AsyncSession,
*,
board_id: UUID,
) -> UUID:
organization_id = (
await session.exec(
select(Board.organization_id).where(col(Board.id) == board_id),
)
).first()
if organization_id is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
return organization_id
async def _task_dep_ids(
session: AsyncSession,
*,
@@ -1231,10 +1169,6 @@ async def _task_read_response(
board_id: UUID,
) -> TaskRead:
dep_ids = await _task_dep_ids(session, board_id=board_id, task_id=task.id)
tag_state = (await load_tag_state(session, task_ids=[task.id])).get(
task.id,
TagState(),
)
blocked_ids = await _task_blocked_ids(
session,
board_id=board_id,
@@ -1245,8 +1179,6 @@ async def _task_read_response(
return TaskRead.model_validate(task, from_attributes=True).model_copy(
update={
"depends_on_task_ids": dep_ids,
"tag_ids": tag_state.tag_ids,
"tags": tag_state.tags,
"blocked_by_task_ids": blocked_ids,
"is_blocked": bool(blocked_ids),
},
@@ -1273,13 +1205,11 @@ def _lead_requested_fields(update: _TaskUpdateInput) -> set[str]:
requested_fields.add("comment")
if update.depends_on_task_ids is not None:
requested_fields.add("depends_on_task_ids")
if update.tag_ids is not None:
requested_fields.add("tag_ids")
return requested_fields
def _validate_lead_update_request(update: _TaskUpdateInput) -> None:
allowed_fields = {"assigned_agent_id", "status", "depends_on_task_ids", "tag_ids"}
allowed_fields = {"assigned_agent_id", "status", "depends_on_task_ids"}
requested_fields = _lead_requested_fields(update)
if update.comment is not None or not requested_fields.issubset(allowed_fields):
raise HTTPException(
@@ -1326,24 +1256,6 @@ async def _lead_effective_dependencies(
return effective_deps, blocked_by
async def _normalized_update_tag_ids(
session: AsyncSession,
*,
update: _TaskUpdateInput,
) -> list[UUID] | None:
if update.tag_ids is None:
return None
organization_id = await _board_organization_id(
session,
board_id=update.board_id,
)
return await validate_tag_ids(
session,
organization_id=organization_id,
tag_ids=update.tag_ids,
)
async def _lead_apply_assignment(
session: AsyncSession,
*,
@@ -1435,10 +1347,6 @@ async def _apply_lead_task_update(
session,
update=update,
)
normalized_tag_ids = await _normalized_update_tag_ids(
session,
update=update,
)
if blocked_by and update.task.status != "done":
update.task.status = "inbox"
@@ -1448,13 +1356,6 @@ async def _apply_lead_task_update(
await _lead_apply_assignment(session, update=update)
_lead_apply_status(update)
if normalized_tag_ids is not None:
await replace_tags(
session,
task_id=update.task.id,
tag_ids=normalized_tag_ids,
)
update.task.updated_at = utcnow()
session.add(update.task)
event_type, message = _task_event_details(update.task, update.previous_status)
@@ -1497,12 +1398,8 @@ async def _apply_non_lead_agent_task_rules(
):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
allowed_fields = {"status", "comment"}
if (
update.depends_on_task_ids is not None
or update.tag_ids is not None
or not set(update.updates).issubset(
allowed_fields,
)
if update.depends_on_task_ids is not None or not set(update.updates).issubset(
allowed_fields,
):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
if "status" in update.updates:
@@ -1535,10 +1432,6 @@ async def _apply_admin_task_rules(
update: _TaskUpdateInput,
) -> None:
admin_normalized_deps: list[UUID] | None = None
update.normalized_tag_ids = await _normalized_update_tag_ids(
session,
update=update,
)
if update.depends_on_task_ids is not None:
if update.task.status == "done":
raise HTTPException(
@@ -1714,21 +1607,6 @@ async def _finalize_updated_task(
):
raise _comment_validation_error()
if update.tag_ids is not None:
normalized = (
update.normalized_tag_ids
if update.normalized_tag_ids is not None
else await _normalized_update_tag_ids(
session,
update=update,
)
)
await replace_tags(
session,
task_id=update.task.id,
tag_ids=normalized or [],
)
session.add(update.task)
await session.commit()
await session.refresh(update.task)

View File

@@ -1,17 +1,4 @@
"""Agent authentication helpers for token-backed API access.
This module is used for *agent-originated* API calls (as opposed to human users).
Key ideas:
- Agents authenticate with an opaque token presented as `X-Agent-Token: <token>`.
- For convenience, some deployments may also allow `Authorization: Bearer <token>`
for agents (controlled by caller/dependency).
- To reduce write-amplification, we only touch `Agent.last_seen_at` at a fixed
interval and we avoid touching it for safe/read-only HTTP methods.
This is intentionally separate from user authentication (Clerk/local bearer token)
so we can evolve agent policy independently.
"""
"""Agent authentication helpers for token-backed API access."""
from __future__ import annotations

View File

@@ -1,19 +1,4 @@
"""User authentication helpers for Clerk and local-token auth modes.
This module resolves an authenticated *user* from inbound HTTP requests.
Auth modes:
- `local`: a single shared bearer token (`LOCAL_AUTH_TOKEN`) for self-hosted
deployments.
- `clerk`: Clerk JWT authentication for multi-user deployments.
The public surface area is the `get_auth_context*` dependencies, which return an
`AuthContext` used across API routers.
Notes:
- This file documents *why* some choices exist (e.g. claim extraction fallbacks)
so maintainers can safely modify auth behavior later.
"""
"""User authentication helpers for Clerk and local-token auth modes."""
from __future__ import annotations

View File

@@ -1,23 +1,4 @@
"""Global exception handlers and request-id middleware for FastAPI.
This module standardizes two operational behaviors:
1) **Request IDs**
- Every response includes an `X-Request-Id` header.
- Clients may supply their own request id; otherwise we generate one.
- The request id is propagated into logs via context vars.
2) **Error responses**
- Errors are returned as JSON with a stable top-level shape:
`{ "detail": ..., "request_id": ... }`
- Validation errors (`422`) return structured field errors.
- Unhandled errors are logged at ERROR and return a generic 500.
Design notes:
- The request-id middleware is installed *outermost* so it runs even when other
middleware returns early.
- Health endpoints are excluded from request logs by default to reduce noise.
"""
"""Global exception handlers and request-id middleware for FastAPI."""
from __future__ import annotations
@@ -224,27 +205,12 @@ def _get_request_id(request: Request) -> str | None:
def _error_payload(*, detail: object, request_id: str | None) -> dict[str, object]:
payload: dict[str, Any] = {"detail": _json_safe(detail)}
payload: dict[str, Any] = {"detail": detail}
if request_id:
payload["request_id"] = request_id
return payload
def _json_safe(value: object) -> object:
"""Return a JSON-serializable representation for error payloads."""
if isinstance(value, bytes):
return value.decode("utf-8", errors="replace")
if isinstance(value, (bytearray, memoryview)):
return bytes(value).decode("utf-8", errors="replace")
if isinstance(value, dict):
return {str(key): _json_safe(item) for key, item in value.items()}
if isinstance(value, (list, tuple, set)):
return [_json_safe(item) for item in value]
if value is None or isinstance(value, (str, int, float, bool)):
return value
return str(value)
async def _request_validation_handler(
request: Request,
exc: RequestValidationError,

View File

@@ -24,7 +24,6 @@ from app.api.gateways import router as gateways_router
from app.api.metrics import router as metrics_router
from app.api.organizations import router as organizations_router
from app.api.souls_directory import router as souls_directory_router
from app.api.tags import router as tags_router
from app.api.tasks import router as tasks_router
from app.api.users import router as users_router
from app.core.config import settings
@@ -108,7 +107,6 @@ api_v1.include_router(board_memory_router)
api_v1.include_router(board_onboarding_router)
api_v1.include_router(approvals_router)
api_v1.include_router(tasks_router)
api_v1.include_router(tags_router)
api_v1.include_router(users_router)
app.include_router(api_v1)

View File

@@ -15,8 +15,6 @@ from app.models.organization_invite_board_access import OrganizationInviteBoardA
from app.models.organization_invites import OrganizationInvite
from app.models.organization_members import OrganizationMember
from app.models.organizations import Organization
from app.models.tag_assignments import TagAssignment
from app.models.tags import Tag
from app.models.task_dependencies import TaskDependency
from app.models.task_fingerprints import TaskFingerprint
from app.models.tasks import Task
@@ -41,7 +39,5 @@ __all__ = [
"TaskDependency",
"Task",
"TaskFingerprint",
"Tag",
"TagAssignment",
"User",
]

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from datetime import datetime
from uuid import UUID, uuid4
from sqlalchemy import JSON, Column, Float
from sqlalchemy import JSON, Column
from sqlmodel import Field
from app.core.time import utcnow
@@ -25,7 +25,7 @@ class Approval(QueryModel, table=True):
agent_id: UUID | None = Field(default=None, foreign_key="agents.id", index=True)
action_type: str
payload: dict[str, object] | None = Field(default=None, sa_column=Column(JSON))
confidence: float = Field(sa_column=Column(Float, nullable=False))
confidence: int
rubric_scores: dict[str, int] | None = Field(default=None, sa_column=Column(JSON))
status: str = Field(default="pending", index=True)
created_at: datetime = Field(default_factory=utcnow)

View File

@@ -1,32 +0,0 @@
"""Task/tag many-to-many link rows."""
from __future__ import annotations
from datetime import datetime
from uuid import UUID, uuid4
from sqlalchemy import UniqueConstraint
from sqlmodel import Field
from app.core.time import utcnow
from app.models.base import QueryModel
RUNTIME_ANNOTATION_TYPES = (datetime,)
class TagAssignment(QueryModel, table=True):
"""Association row mapping one task to one tag."""
__tablename__ = "tag_assignments" # pyright: ignore[reportAssignmentType]
__table_args__ = (
UniqueConstraint(
"task_id",
"tag_id",
name="uq_tag_assignments_task_id_tag_id",
),
)
id: UUID = Field(default_factory=uuid4, primary_key=True)
task_id: UUID = Field(foreign_key="tasks.id", index=True)
tag_id: UUID = Field(foreign_key="tags.id", index=True)
created_at: datetime = Field(default_factory=utcnow)

View File

@@ -1,36 +0,0 @@
"""Tag model for organization-scoped task categorization."""
from __future__ import annotations
from datetime import datetime
from uuid import UUID, uuid4
from sqlalchemy import UniqueConstraint
from sqlmodel import Field
from app.core.time import utcnow
from app.models.tenancy import TenantScoped
RUNTIME_ANNOTATION_TYPES = (datetime,)
class Tag(TenantScoped, table=True):
"""Organization-scoped tag used to classify and group tasks."""
__tablename__ = "tags" # pyright: ignore[reportAssignmentType]
__table_args__ = (
UniqueConstraint(
"organization_id",
"slug",
name="uq_tags_organization_id_slug",
),
)
id: UUID = Field(default_factory=uuid4, primary_key=True)
organization_id: UUID = Field(foreign_key="organizations.id", index=True)
name: str
slug: str = Field(index=True)
color: str = Field(default="9e9e9e")
description: str | None = None
created_at: datetime = Field(default_factory=utcnow)
updated_at: datetime = Field(default_factory=utcnow)

View File

@@ -31,7 +31,6 @@ from app.schemas.souls_directory import (
SoulsDirectorySearchResponse,
SoulsDirectorySoulRef,
)
from app.schemas.tags import TagCreate, TagRead, TagRef, TagUpdate
from app.schemas.tasks import TaskCreate, TaskRead, TaskUpdate
from app.schemas.users import UserCreate, UserRead, UserUpdate
@@ -71,10 +70,6 @@ __all__ = [
"SoulsDirectoryMarkdownResponse",
"SoulsDirectorySearchResponse",
"SoulsDirectorySoulRef",
"TagCreate",
"TagRead",
"TagRef",
"TagUpdate",
"TaskCreate",
"TaskRead",
"TaskUpdate",

View File

@@ -11,7 +11,6 @@ from sqlmodel import Field, SQLModel
ApprovalStatus = Literal["pending", "approved", "rejected"]
STATUS_REQUIRED_ERROR = "status is required"
LEAD_REASONING_REQUIRED_ERROR = "lead reasoning is required"
RUNTIME_ANNOTATION_TYPES = (datetime, UUID)
@@ -22,7 +21,7 @@ class ApprovalBase(SQLModel):
task_id: UUID | None = None
task_ids: list[UUID] = Field(default_factory=list)
payload: dict[str, object] | None = None
confidence: float = Field(ge=0, le=100)
confidence: int
rubric_scores: dict[str, int] | None = None
status: ApprovalStatus = "pending"
@@ -49,21 +48,6 @@ class ApprovalCreate(ApprovalBase):
agent_id: UUID | None = None
@model_validator(mode="after")
def validate_lead_reasoning(self) -> Self:
"""Ensure each approval request includes explicit lead reasoning."""
payload = self.payload
if isinstance(payload, dict):
reason = payload.get("reason")
if isinstance(reason, str) and reason.strip():
return self
decision = payload.get("decision")
if isinstance(decision, dict):
nested_reason = decision.get("reason")
if isinstance(nested_reason, str) and nested_reason.strip():
return self
raise ValueError(LEAD_REASONING_REQUIRED_ERROR)
class ApprovalUpdate(SQLModel):
"""Payload for mutating approval status."""
@@ -83,7 +67,6 @@ class ApprovalRead(ApprovalBase):
id: UUID
board_id: UUID
task_titles: list[str] = Field(default_factory=list)
agent_id: UUID | None = None
created_at: datetime
resolved_at: datetime | None = None

View File

@@ -8,8 +8,6 @@ from typing import Literal
from sqlmodel import SQLModel
RUNTIME_ANNOTATION_TYPES = (datetime,)
DashboardRangeKey = Literal["24h", "3d", "7d", "14d", "1m", "3m", "6m", "1y"]
DashboardBucketKey = Literal["hour", "day", "week", "month"]
class DashboardSeriesPoint(SQLModel):
@@ -26,22 +24,21 @@ class DashboardWipPoint(SQLModel):
inbox: int
in_progress: int
review: int
done: int
class DashboardRangeSeries(SQLModel):
"""Series payload for a single range/bucket combination."""
range: DashboardRangeKey
bucket: DashboardBucketKey
range: Literal["24h", "7d"]
bucket: Literal["hour", "day"]
points: list[DashboardSeriesPoint]
class DashboardWipRangeSeries(SQLModel):
"""WIP series payload for a single range/bucket combination."""
range: DashboardRangeKey
bucket: DashboardBucketKey
range: Literal["24h", "7d"]
bucket: Literal["hour", "day"]
points: list[DashboardWipPoint]
@@ -71,7 +68,7 @@ class DashboardKpis(SQLModel):
class DashboardMetrics(SQLModel):
"""Complete dashboard metrics response payload."""
range: DashboardRangeKey
range: Literal["24h", "7d"]
generated_at: datetime
kpis: DashboardKpis
throughput: DashboardSeriesSet

View File

@@ -1,126 +0,0 @@
"""Schemas for tag CRUD payloads."""
from __future__ import annotations
import re
from datetime import datetime
from typing import Self
from uuid import UUID
from pydantic import field_validator, model_validator
from sqlmodel import SQLModel
from app.schemas.common import NonEmptyStr
HEX_COLOR_RE = re.compile(r"^[0-9a-f]{6}$")
RUNTIME_ANNOTATION_TYPES = (datetime, UUID, NonEmptyStr)
def _normalize_color(value: str | None) -> str | None:
if value is None:
return None
cleaned = value.strip().lower().lstrip("#")
if not cleaned:
return None
if not HEX_COLOR_RE.fullmatch(cleaned):
raise ValueError("color must be a 6-digit hex value")
return cleaned
class TagBase(SQLModel):
"""Shared tag fields for create/read payloads."""
name: str
slug: str
color: str = "9e9e9e"
description: str | None = None
class TagRef(SQLModel):
"""Compact tag representation embedded in task payloads."""
id: UUID
name: str
slug: str
color: str
class TagCreate(SQLModel):
"""Payload for creating a tag."""
name: NonEmptyStr
slug: str | None = None
color: str = "9e9e9e"
description: str | None = None
@field_validator("slug", mode="before")
@classmethod
def normalize_slug(cls, value: object) -> object | None:
"""Treat empty slug strings as unset so API can auto-generate."""
if value is None:
return None
if isinstance(value, str):
cleaned = value.strip()
return cleaned or None
return value
@field_validator("color", mode="before")
@classmethod
def normalize_color(cls, value: object) -> object:
"""Normalize color to lowercase hex without a leading hash."""
if isinstance(value, str):
normalized = _normalize_color(value)
if normalized is None:
raise ValueError("color is required")
return normalized
return value
class TagUpdate(SQLModel):
"""Payload for partial tag updates."""
name: NonEmptyStr | None = None
slug: str | None = None
color: str | None = None
description: str | None = None
@field_validator("slug", mode="before")
@classmethod
def normalize_slug(cls, value: object) -> object | None:
"""Treat empty slug strings as unset so API can auto-generate."""
if value is None:
return None
if isinstance(value, str):
cleaned = value.strip()
return cleaned or None
return value
@field_validator("color", mode="before")
@classmethod
def normalize_color(cls, value: object) -> object | None:
"""Normalize color to lowercase hex without a leading hash."""
if value is None:
return None
if isinstance(value, str):
normalized = _normalize_color(value)
if normalized is None:
raise ValueError("color must be a 6-digit hex value")
return normalized
return value
@model_validator(mode="after")
def require_some_update(self) -> Self:
"""Reject empty update payloads to avoid no-op patch calls."""
if not self.model_fields_set:
raise ValueError("At least one field is required")
return self
class TagRead(TagBase):
"""Tag payload returned from API endpoints."""
id: UUID
organization_id: UUID
task_count: int = 0
created_at: datetime
updated_at: datetime

View File

@@ -10,13 +10,12 @@ from pydantic import field_validator, model_validator
from sqlmodel import Field, SQLModel
from app.schemas.common import NonEmptyStr
from app.schemas.tags import TagRef
TaskStatus = Literal["inbox", "in_progress", "review", "done"]
STATUS_REQUIRED_ERROR = "status is required"
# Keep these symbols as runtime globals so Pydantic can resolve
# deferred annotations reliably.
RUNTIME_ANNOTATION_TYPES = (datetime, UUID, NonEmptyStr, TagRef)
RUNTIME_ANNOTATION_TYPES = (datetime, UUID, NonEmptyStr)
class TaskBase(SQLModel):
@@ -29,7 +28,6 @@ class TaskBase(SQLModel):
due_at: datetime | None = None
assigned_agent_id: UUID | None = None
depends_on_task_ids: list[UUID] = Field(default_factory=list)
tag_ids: list[UUID] = Field(default_factory=list)
class TaskCreate(TaskBase):
@@ -48,7 +46,6 @@ class TaskUpdate(SQLModel):
due_at: datetime | None = None
assigned_agent_id: UUID | None = None
depends_on_task_ids: list[UUID] | None = None
tag_ids: list[UUID] | None = None
comment: NonEmptyStr | None = None
@field_validator("comment", mode="before")
@@ -80,7 +77,6 @@ class TaskRead(TaskBase):
updated_at: datetime
blocked_by_task_ids: list[UUID] = Field(default_factory=list)
is_blocked: bool = False
tags: list[TagRef] = Field(default_factory=list)
class TaskCommentCreate(SQLModel):

View File

@@ -12,7 +12,6 @@ from app.schemas.approvals import ApprovalRead
from app.schemas.board_groups import BoardGroupRead
from app.schemas.board_memory import BoardMemoryRead
from app.schemas.boards import BoardRead
from app.schemas.tags import TagRef
from app.schemas.tasks import TaskRead
RUNTIME_ANNOTATION_TYPES = (
@@ -23,7 +22,6 @@ RUNTIME_ANNOTATION_TYPES = (
BoardGroupRead,
BoardMemoryRead,
BoardRead,
TagRef,
)
@@ -59,7 +57,6 @@ class BoardGroupTaskSummary(SQLModel):
assignee: str | None = None
due_at: datetime | None = None
in_progress_at: datetime | None = None
tags: list[TagRef] = Field(default_factory=list)
created_at: datetime
updated_at: datetime

View File

@@ -11,7 +11,6 @@ from sqlmodel import col, select
from app.models.approval_task_links import ApprovalTaskLink
from app.models.approvals import Approval
from app.models.tasks import Task
if TYPE_CHECKING:
from sqlmodel.ext.asyncio.session import AsyncSession
@@ -122,88 +121,6 @@ async def replace_approval_task_links(
session.add(ApprovalTaskLink(approval_id=approval_id, task_id=task_id))
async def lock_tasks_for_approval(
session: AsyncSession,
*,
task_ids: Sequence[UUID],
) -> None:
"""Acquire row locks for task ids in deterministic order within a transaction."""
normalized_task_ids = sorted({*task_ids}, key=str)
if not normalized_task_ids:
return
statement = (
select(col(Task.id))
.where(col(Task.id).in_(normalized_task_ids))
.order_by(col(Task.id).asc())
.with_for_update()
)
# Materialize results so the lock query fully executes before proceeding.
_ = list(await session.exec(statement))
async def pending_approval_conflicts_by_task(
session: AsyncSession,
*,
board_id: UUID,
task_ids: Sequence[UUID],
exclude_approval_id: UUID | None = None,
) -> dict[UUID, UUID]:
"""Return the first conflicting pending approval id for each requested task id."""
normalized_task_ids = list({*task_ids})
if not normalized_task_ids:
return {}
linked_statement = (
select(
col(ApprovalTaskLink.task_id),
col(Approval.id),
col(Approval.created_at),
)
.join(Approval, col(Approval.id) == col(ApprovalTaskLink.approval_id))
.where(col(Approval.board_id) == board_id)
.where(col(Approval.status) == "pending")
.where(col(ApprovalTaskLink.task_id).in_(normalized_task_ids))
.order_by(col(Approval.created_at).asc(), col(Approval.id).asc())
)
if exclude_approval_id is not None:
linked_statement = linked_statement.where(col(Approval.id) != exclude_approval_id)
linked_rows = list(await session.exec(linked_statement))
conflicts: dict[UUID, UUID] = {}
for task_id, approval_id, _created_at in linked_rows:
conflicts.setdefault(task_id, approval_id)
legacy_statement = (
select(
col(Approval.task_id),
col(Approval.id),
col(Approval.created_at),
)
.where(col(Approval.board_id) == board_id)
.where(col(Approval.status) == "pending")
.where(col(Approval.task_id).is_not(None))
.where(col(Approval.task_id).in_(normalized_task_ids))
.where(
~exists(
select(1)
.where(col(ApprovalTaskLink.approval_id) == col(Approval.id))
.correlate(Approval),
),
)
.order_by(col(Approval.created_at).asc(), col(Approval.id).asc())
)
if exclude_approval_id is not None:
legacy_statement = legacy_statement.where(col(Approval.id) != exclude_approval_id)
legacy_rows = list(await session.exec(legacy_statement))
for legacy_task_id, approval_id, _created_at in legacy_rows:
if legacy_task_id is None:
continue
conflicts.setdefault(legacy_task_id, approval_id)
return conflicts
async def task_counts_for_board(
session: AsyncSession,
*,

View File

@@ -21,7 +21,6 @@ from app.schemas.view_models import (
BoardGroupSnapshot,
BoardGroupTaskSummary,
)
from app.services.tags import TagState, load_tag_state
if TYPE_CHECKING:
from sqlalchemy.sql.elements import ColumnElement
@@ -123,7 +122,6 @@ def _task_summaries_by_board(
boards_by_id: dict[UUID, Board],
tasks: list[Task],
agent_name_by_id: dict[UUID, str],
tag_state_by_task_id: dict[UUID, TagState],
per_board_task_limit: int,
) -> dict[UUID, list[BoardGroupTaskSummary]]:
"""Build limited per-board task summary lists."""
@@ -140,7 +138,6 @@ def _task_summaries_by_board(
if board is None:
continue
current.append(
# Include tags so cross-board snapshots can be grouped quickly in the UI.
BoardGroupTaskSummary(
id=task.id,
board_id=task.board_id,
@@ -156,7 +153,6 @@ def _task_summaries_by_board(
),
due_at=task.due_at,
in_progress_at=task.in_progress_at,
tags=tag_state_by_task_id.get(task.id, TagState()).tags,
created_at=task.created_at,
updated_at=task.updated_at,
),
@@ -191,15 +187,10 @@ async def build_group_snapshot(
include_done=include_done,
)
agent_name_by_id = await _agent_names(session, tasks)
tag_state_by_task_id = await load_tag_state(
session,
task_ids=[task.id for task in tasks],
)
tasks_by_board = _task_summaries_by_board(
boards_by_id=boards_by_id,
tasks=tasks,
agent_name_by_id=agent_name_by_id,
tag_state_by_task_id=tag_state_by_task_id,
per_board_task_limit=per_board_task_limit,
)
snapshots = [

View File

@@ -17,7 +17,6 @@ from app.schemas.boards import BoardRead
from app.schemas.view_models import BoardSnapshot, TaskCardRead
from app.services.approval_task_links import load_task_ids_by_approval, task_counts_for_board
from app.services.openclaw.provisioning_db import AgentLifecycleService
from app.services.tags import TagState, load_tag_state
from app.services.task_dependencies import (
blocked_by_dependency_ids,
dependency_ids_by_task_id,
@@ -36,21 +35,10 @@ def _memory_to_read(memory: BoardMemory) -> BoardMemoryRead:
return BoardMemoryRead.model_validate(memory, from_attributes=True)
def _approval_to_read(
approval: Approval,
*,
task_ids: list[UUID],
task_titles: list[str],
) -> ApprovalRead:
def _approval_to_read(approval: Approval, *, task_ids: list[UUID]) -> ApprovalRead:
model = ApprovalRead.model_validate(approval, from_attributes=True)
primary_task_id = task_ids[0] if task_ids else None
return model.model_copy(
update={
"task_id": primary_task_id,
"task_ids": task_ids,
"task_titles": task_titles,
},
)
return model.model_copy(update={"task_id": primary_task_id, "task_ids": task_ids})
def _task_to_card(
@@ -60,13 +48,11 @@ def _task_to_card(
counts_by_task_id: dict[UUID, tuple[int, int]],
deps_by_task_id: dict[UUID, list[UUID]],
dependency_status_by_id_map: dict[UUID, str],
tag_state_by_task_id: dict[UUID, TagState],
) -> TaskCardRead:
card = TaskCardRead.model_validate(task, from_attributes=True)
approvals_count, approvals_pending_count = counts_by_task_id.get(task.id, (0, 0))
assignee = agent_name_by_id.get(task.assigned_agent_id) if task.assigned_agent_id else None
depends_on_task_ids = deps_by_task_id.get(task.id, [])
tag_state = tag_state_by_task_id.get(task.id, TagState())
blocked_by_task_ids = blocked_by_dependency_ids(
dependency_ids=depends_on_task_ids,
status_by_id=dependency_status_by_id_map,
@@ -79,8 +65,6 @@ def _task_to_card(
"approvals_count": approvals_count,
"approvals_pending_count": approvals_pending_count,
"depends_on_task_ids": depends_on_task_ids,
"tag_ids": tag_state.tag_ids,
"tags": tag_state.tags,
"blocked_by_task_ids": blocked_by_task_ids,
"is_blocked": bool(blocked_by_task_ids),
},
@@ -97,10 +81,6 @@ async def build_board_snapshot(session: AsyncSession, board: Board) -> BoardSnap
.all(session),
)
task_ids = [task.id for task in tasks]
tag_state_by_task_id = await load_tag_state(
session,
task_ids=task_ids,
)
deps_by_task_id = await dependency_ids_by_task_id(
session,
@@ -148,21 +128,13 @@ async def build_board_snapshot(session: AsyncSession, board: Board) -> BoardSnap
session,
approval_ids=approval_ids,
)
task_title_by_id = {task.id: task.title for task in tasks}
approval_reads = [
_approval_to_read(
approval,
task_ids=(
linked_task_ids := task_ids_by_approval.get(
approval.id,
[approval.task_id] if approval.task_id is not None else [],
)
task_ids=task_ids_by_approval.get(
approval.id,
[approval.task_id] if approval.task_id is not None else [],
),
task_titles=[
task_title_by_id[task_id]
for task_id in linked_task_ids
if task_id in task_title_by_id
],
)
for approval in approvals
]
@@ -176,7 +148,6 @@ async def build_board_snapshot(session: AsyncSession, board: Board) -> BoardSnap
counts_by_task_id=counts_by_task_id,
deps_by_task_id=deps_by_task_id,
dependency_status_by_id_map=dependency_status_by_id_map,
tag_state_by_task_id=tag_state_by_task_id,
)
for task in tasks
]

View File

@@ -5,16 +5,16 @@ from __future__ import annotations
import hashlib
from typing import Mapping
CONFIDENCE_THRESHOLD = 80.0
CONFIDENCE_THRESHOLD = 80
MIN_PLANNING_SIGNALS = 2
def compute_confidence(rubric_scores: Mapping[str, int]) -> float:
def compute_confidence(rubric_scores: Mapping[str, int]) -> int:
"""Compute aggregate confidence from rubric score components."""
return float(sum(rubric_scores.values()))
return int(sum(rubric_scores.values()))
def approval_required(*, confidence: float, is_external: bool, is_risky: bool) -> bool:
def approval_required(*, confidence: int, is_external: bool, is_risky: bool) -> bool:
"""Return whether an action must go through explicit approval."""
return is_external or is_risky or confidence < CONFIDENCE_THRESHOLD

View File

@@ -1,158 +0,0 @@
"""Helpers for validating and loading tags and tag mappings."""
from __future__ import annotations
import re
from collections import defaultdict
from collections.abc import Sequence
from dataclasses import dataclass, field
from typing import TYPE_CHECKING
from uuid import UUID
from fastapi import HTTPException, status
from sqlalchemy import delete, func
from sqlmodel import col, select
from app.models.tag_assignments import TagAssignment
from app.models.tags import Tag
from app.schemas.tags import TagRef
if TYPE_CHECKING:
from sqlmodel.ext.asyncio.session import AsyncSession
# Runs of anything outside [a-z0-9] collapse into a single hyphen.
SLUG_RE = re.compile(r"[^a-z0-9]+")
def slugify_tag(value: str) -> str:
    """Return a URL-safe slug derived from ``value``.

    Lowercases the input, replaces every run of non-alphanumeric characters
    with a single ``-``, and trims leading/trailing hyphens. Falls back to
    the literal ``"tag"`` when nothing survives.
    """
    lowered = value.lower()
    hyphenated = SLUG_RE.sub("-", lowered)
    candidate = hyphenated.strip("-")
    if candidate:
        return candidate
    return "tag"
def _dedupe_uuid_list(values: Sequence[UUID]) -> list[UUID]:
deduped: list[UUID] = []
seen: set[UUID] = set()
for value in values:
if value in seen:
continue
seen.add(value)
deduped.append(value)
return deduped
async def validate_tag_ids(
    session: AsyncSession,
    *,
    organization_id: UUID,
    tag_ids: Sequence[UUID],
) -> list[UUID]:
    """Validate tag IDs within an organization and return deduped IDs.

    Raises:
        HTTPException: 404 with the missing ids when any requested tag is
            absent from the organization.
    """
    requested = _dedupe_uuid_list(tag_ids)
    if not requested:
        return []
    statement = (
        select(Tag.id)
        .where(col(Tag.organization_id) == organization_id)
        .where(col(Tag.id).in_(requested))
    )
    known_ids = set(await session.exec(statement))
    unknown = [tag_id for tag_id in requested if tag_id not in known_ids]
    if unknown:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail={
                "message": "One or more tags do not exist in this organization.",
                "missing_tag_ids": [str(tag_id) for tag_id in unknown],
            },
        )
    return requested
@dataclass(slots=True)
class TagState:
    """Ordered tag state for a task payload (parallel id/ref lists)."""
    # Tag IDs in assignment order; mirrors the ``tags`` list below.
    tag_ids: list[UUID] = field(default_factory=list)
    # Full ``TagRef`` payloads in the same order as ``tag_ids``.
    tags: list[TagRef] = field(default_factory=list)
async def load_tag_state(
    session: AsyncSession,
    *,
    task_ids: Sequence[UUID],
) -> dict[UUID, TagState]:
    """Return ordered tag IDs and refs for each task id.

    Rows are ordered by assignment creation time, so each ``TagState``
    reflects the order tags were attached to the task. Tasks with no tags
    are absent from the returned mapping.
    """
    wanted_task_ids = _dedupe_uuid_list(task_ids)
    if not wanted_task_ids:
        return {}
    statement = (
        select(
            col(TagAssignment.task_id),
            Tag,
        )
        .join(Tag, col(Tag.id) == col(TagAssignment.tag_id))
        .where(col(TagAssignment.task_id).in_(wanted_task_ids))
        .order_by(
            col(TagAssignment.task_id).asc(),
            col(TagAssignment.created_at).asc(),
        )
    )
    state_by_task_id: dict[UUID, TagState] = {}
    for task_id, tag in await session.exec(statement):
        if task_id is None:
            # Defensive: skip orphaned assignment rows.
            continue
        state = state_by_task_id.setdefault(task_id, TagState())
        state.tag_ids.append(tag.id)
        state.tags.append(
            TagRef(id=tag.id, name=tag.name, slug=tag.slug, color=tag.color),
        )
    return state_by_task_id
async def replace_tags(
    session: AsyncSession,
    *,
    task_id: UUID,
    tag_ids: Sequence[UUID],
) -> None:
    """Replace all tag-assignment rows for a task.

    Existing assignments are deleted first, then one row per deduped tag id
    is staged on the session (flush/commit is the caller's responsibility).
    """
    desired_tag_ids = _dedupe_uuid_list(tag_ids)
    delete_stmt = delete(TagAssignment).where(
        col(TagAssignment.task_id) == task_id,
    )
    await session.exec(delete_stmt)
    for tag_id in desired_tag_ids:
        session.add(TagAssignment(task_id=task_id, tag_id=tag_id))
async def task_counts_for_tags(
    session: AsyncSession,
    *,
    tag_ids: Sequence[UUID],
) -> dict[UUID, int]:
    """Return count of tagged tasks per tag id.

    Tags with zero assignments produce no row and are omitted from the map.
    """
    wanted = _dedupe_uuid_list(tag_ids)
    if not wanted:
        return {}
    statement = (
        select(
            col(TagAssignment.tag_id),
            func.count(col(TagAssignment.task_id)),
        )
        .where(col(TagAssignment.tag_id).in_(wanted))
        .group_by(col(TagAssignment.tag_id))
    )
    counts: dict[UUID, int] = {}
    for tag_id, count in await session.exec(statement):
        counts[tag_id] = int(count or 0)
    return counts

View File

@@ -1,48 +0,0 @@
"""add indexes for board memory + task comments
Revision ID: 99cd6df95f85
Revises: f4d2b649e93a
Create Date: 2026-02-12 08:13:19.786621
"""
from __future__ import annotations
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99cd6df95f85'
down_revision = 'f4d2b649e93a'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add composite indexes for board-memory and task-comment listings."""
    # Board memory lists filter on (board_id, is_chat) and order by created_at desc.
    op.create_index(
        "ix_board_memory_board_id_is_chat_created_at",
        "board_memory",
        ["board_id", "is_chat", "created_at"],
    )
    # Task comments are stored as ActivityEvent rows with event_type='task.comment'.
    # Listing comments uses task_id + created_at ordering, so a partial composite index
    # avoids scanning other activity rows.
    op.create_index(
        "ix_activity_events_task_comment_task_id_created_at",
        "activity_events",
        ["task_id", "created_at"],
        postgresql_where=sa.text("event_type = 'task.comment'"),
    )
def downgrade() -> None:
    """Drop the listing indexes (reverse order of creation in upgrade)."""
    op.drop_index(
        "ix_activity_events_task_comment_task_id_created_at",
        table_name="activity_events",
    )
    op.drop_index(
        "ix_board_memory_board_id_is_chat_created_at",
        table_name="board_memory",
    )

View File

@@ -1,45 +0,0 @@
"""add composite indexes for task listing
Revision ID: b4338be78eec
Revises: f4d2b649e93a
Create Date: 2026-02-12 07:54:27.450391
"""
from __future__ import annotations
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b4338be78eec'
down_revision = 'f4d2b649e93a'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add composite btree indexes backing the task list endpoints."""
    # Task list endpoints filter primarily by board_id, optionally by status
    # and assigned_agent_id, and always order by created_at (desc in code).
    # These composite btree indexes allow fast backward scans with LIMIT.
    op.create_index(
        "ix_tasks_board_id_created_at",
        "tasks",
        ["board_id", "created_at"],
    )
    op.create_index(
        "ix_tasks_board_id_status_created_at",
        "tasks",
        ["board_id", "status", "created_at"],
    )
    op.create_index(
        "ix_tasks_board_id_assigned_agent_id_created_at",
        "tasks",
        ["board_id", "assigned_agent_id", "created_at"],
    )
def downgrade() -> None:
    """Drop the task-listing indexes (reverse order of creation)."""
    op.drop_index("ix_tasks_board_id_assigned_agent_id_created_at", table_name="tasks")
    op.drop_index("ix_tasks_board_id_status_created_at", table_name="tasks")
    op.drop_index("ix_tasks_board_id_created_at", table_name="tasks")

View File

@@ -1,101 +0,0 @@
"""add tags and tag assignments
Revision ID: d8c1e5a4f7b2
Revises: 99cd6df95f85, b4338be78eec
Create Date: 2026-02-12 16:05:00.000000
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "d8c1e5a4f7b2"
down_revision = ("99cd6df95f85", "b4338be78eec")
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create ``tags`` and ``tag_assignments`` idempotently.

    Uses a runtime inspector so the migration is safe to re-run against
    databases where the tables or indexes already exist out-of-band.
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    if not inspector.has_table("tags"):
        op.create_table(
            "tags",
            sa.Column("id", sa.Uuid(), nullable=False),
            sa.Column("organization_id", sa.Uuid(), nullable=False),
            sa.Column("name", sa.String(), nullable=False),
            sa.Column("slug", sa.String(), nullable=False),
            sa.Column("color", sa.String(), nullable=False),
            sa.Column("description", sa.String(), nullable=True),
            sa.Column("created_at", sa.DateTime(), nullable=False),
            sa.Column("updated_at", sa.DateTime(), nullable=False),
            sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
            sa.PrimaryKeyConstraint("id"),
            sa.UniqueConstraint(
                "organization_id",
                "slug",
                name="uq_tags_organization_id_slug",
            ),
        )
    # Index creation is guarded separately, since the table may pre-exist
    # with a different index set.
    tag_indexes = {item.get("name") for item in inspector.get_indexes("tags")}
    if op.f("ix_tags_organization_id") not in tag_indexes:
        op.create_index(
            op.f("ix_tags_organization_id"),
            "tags",
            ["organization_id"],
            unique=False,
        )
    if op.f("ix_tags_slug") not in tag_indexes:
        op.create_index(
            op.f("ix_tags_slug"),
            "tags",
            ["slug"],
            unique=False,
        )
    if not inspector.has_table("tag_assignments"):
        op.create_table(
            "tag_assignments",
            sa.Column("id", sa.Uuid(), nullable=False),
            sa.Column("task_id", sa.Uuid(), nullable=False),
            sa.Column("tag_id", sa.Uuid(), nullable=False),
            sa.Column("created_at", sa.DateTime(), nullable=False),
            sa.ForeignKeyConstraint(["tag_id"], ["tags.id"]),
            sa.ForeignKeyConstraint(["task_id"], ["tasks.id"]),
            sa.PrimaryKeyConstraint("id"),
            sa.UniqueConstraint(
                "task_id",
                "tag_id",
                name="uq_tag_assignments_task_id_tag_id",
            ),
        )
    assignment_indexes = {
        item.get("name") for item in inspector.get_indexes("tag_assignments")
    }
    if op.f("ix_tag_assignments_task_id") not in assignment_indexes:
        op.create_index(
            op.f("ix_tag_assignments_task_id"),
            "tag_assignments",
            ["task_id"],
            unique=False,
        )
    if op.f("ix_tag_assignments_tag_id") not in assignment_indexes:
        op.create_index(
            op.f("ix_tag_assignments_tag_id"),
            "tag_assignments",
            ["tag_id"],
            unique=False,
        )
def downgrade() -> None:
    """Drop tag tables and their indexes (children before parents)."""
    op.drop_index(op.f("ix_tag_assignments_tag_id"), table_name="tag_assignments")
    op.drop_index(op.f("ix_tag_assignments_task_id"), table_name="tag_assignments")
    op.drop_table("tag_assignments")
    op.drop_index(op.f("ix_tags_slug"), table_name="tags")
    op.drop_index(op.f("ix_tags_organization_id"), table_name="tags")
    op.drop_table("tags")

View File

@@ -1,39 +0,0 @@
"""make approval confidence float
Revision ID: e2f9c6b4a1d3
Revises: d8c1e5a4f7b2
Create Date: 2026-02-12 20:00:00.000000
"""
from __future__ import annotations
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "e2f9c6b4a1d3"
down_revision = "d8c1e5a4f7b2"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Widen approvals.confidence from Integer to Float (fractional scores)."""
    op.alter_column(
        "approvals",
        "confidence",
        existing_type=sa.Integer(),
        type_=sa.Float(),
        existing_nullable=False,
    )
def downgrade() -> None:
    """Revert approvals.confidence to Integer.

    NOTE(review): fractional values will be truncated/rounded by the
    database on downgrade — confirm acceptable before running.
    """
    op.alter_column(
        "approvals",
        "confidence",
        existing_type=sa.Float(),
        type_=sa.Integer(),
        existing_nullable=False,
    )

View File

@@ -31,3 +31,4 @@ This file defines how you decide when to act vs when to ask.
## Collaboration defaults
- If you are idle/unassigned: pick 1 in-progress/review task owned by someone else and leave a concrete, helpful comment (context gaps, quality risks, validation ideas, edge cases, handoff clarity).
- If you notice duplicate work: flag it and propose a merge/split so there is one clear DRI per deliverable.

View File

@@ -146,7 +146,6 @@ run a short intake with the human in **board chat**.
2) Review recent tasks/comments and board memory:
- GET $BASE_URL/api/v1/agent/boards/$BOARD_ID/tasks?limit=50
- GET $BASE_URL/api/v1/agent/boards/$BOARD_ID/tags
- GET $BASE_URL/api/v1/agent/boards/$BOARD_ID/memory?limit=50
- GET $BASE_URL/api/v1/agent/agents?board_id=$BOARD_ID
- For any task in **review**, fetch its comments:
@@ -275,13 +274,9 @@ Body: {"depends_on_task_ids":["DEP_TASK_ID_1","DEP_TASK_ID_2"]}
7) Creating new tasks:
- Before creating any task or approval, run the de-duplication pass (step 2a). If a similar task already exists, merge/split scope there instead of creating a duplicate.
- Leads **can** create tasks directly when confidence >= 70 and the action is not risky/external.
- If tags are configured (`GET /api/v1/agent/boards/$BOARD_ID/tags` returns items), choose the most relevant tags and include their ids in `tag_ids`.
- Build and keep a local map: `slug/name -> tag_id`.
- Prefer 1-3 tags per task; avoid over-tagging.
- If no existing tag fits, set `tag_ids: []` and leave a short note in your plan/comment so admins can add a missing tag later.
POST $BASE_URL/api/v1/agent/boards/$BOARD_ID/tasks
Body example:
{"title":"...","description":"...","priority":"high","status":"inbox","assigned_agent_id":null,"depends_on_task_ids":["DEP_TASK_ID"],"tag_ids":["TAG_ID_1","TAG_ID_2"]}
{"title":"...","description":"...","priority":"high","status":"inbox","assigned_agent_id":null,"depends_on_task_ids":["DEP_TASK_ID"]}
- Task descriptions must be written in clear markdown (short sections, bullets/checklists when helpful).
- If the task depends on other tasks, always set `depends_on_task_ids`. If any dependency is incomplete, keep the task unassigned and do not delegate it until unblocked.
- If confidence < 70 or the action is risky/external, request approval instead:

View File

@@ -66,3 +66,4 @@ Notes:
| Date | Change |
|------|--------|
| | |

View File

@@ -1,56 +0,0 @@
# ruff: noqa: INP001
"""Regression test sketch for agent-token lookup complexity.
Context:
- Current implementation performs PBKDF2 verification in a loop over *all* agents
that have a token hash (`agent_auth._find_agent_for_token`).
- This is O(N_agents) *and* each verify is expensive (PBKDF2 200k iterations).
This test is marked xfail to document the desired behavior after a hardening
refactor: O(1) lookup + single hash verify.
Once token lookup is refactored, flip this to a normal passing test.
"""
from __future__ import annotations
from types import SimpleNamespace
import pytest
from app.core import agent_auth
@pytest.mark.asyncio
@pytest.mark.xfail(
    reason="Known DoS risk: agent token verification is currently O(N_agents)."
    " Refactor token scheme/lookup to O(1) and make this pass.",
    strict=False,
)
async def test_agent_token_lookup_should_not_verify_more_than_once(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Pin the desired post-refactor behavior: at most one hash verification."""
    # Fake a session that returns many agents with token hashes.
    class _FakeSession:
        async def exec(self, _stmt: object) -> list[object]:
            # 50 candidates force the current implementation into its linear scan.
            agents = []
            for i in range(50):
                agents.append(
                    SimpleNamespace(agent_token_hash=f"pbkdf2_sha256$1$salt{i}$digest{i}")
                )
            return agents
    calls = {"n": 0}
    def _fake_verify(_token: str, _stored_hash: str) -> bool:
        # Count invocations instead of running real (expensive) PBKDF2 work.
        calls["n"] += 1
        # Always invalid
        return False
    monkeypatch.setattr(agent_auth, "verify_agent_token", _fake_verify)
    out = await agent_auth._find_agent_for_token(_FakeSession(), "invalid")  # type: ignore[arg-type]
    assert out is None
    # Desired behavior after refactor: avoid linear scan.
    assert calls["n"] <= 1

View File

@@ -1,99 +0,0 @@
from __future__ import annotations
from dataclasses import dataclass
from uuid import UUID, uuid4
import pytest
from fastapi import HTTPException
from app.api import agent as agent_api
from app.core.agent_auth import AgentAuthContext
from app.models.agents import Agent
from app.models.boards import Board
from app.models.tags import Tag
@dataclass
class _FakeExecResult:
    """Stand-in for a SQLModel exec result; only ``.all()`` is exercised."""
    # Canned rows handed back verbatim by ``all()``.
    tags: list[Tag]
    def all(self) -> list[Tag]:
        # Mirror the real result API: return every row.
        return self.tags
@dataclass
class _FakeSession:
    """Fake DB session whose ``exec`` ignores the query and yields canned tags."""
    # Rows returned for every query.
    tags: list[Tag]
    async def exec(self, _query: object) -> _FakeExecResult:
        # Query content is irrelevant; tests only assert on the returned rows.
        return _FakeExecResult(self.tags)
def _board() -> Board:
    """Build a board fixture with fresh org/board ids."""
    return Board(
        id=uuid4(),
        organization_id=uuid4(),
        name="Delivery",
        slug="delivery",
    )
def _agent_ctx(*, board_id: UUID | None) -> AgentAuthContext:
    """Build a board-lead agent auth context bound to ``board_id``."""
    return AgentAuthContext(
        actor_type="agent",
        agent=Agent(
            id=uuid4(),
            board_id=board_id,
            gateway_id=uuid4(),
            name="Lead",
            is_board_lead=True,
        ),
    )
@pytest.mark.asyncio
async def test_list_tags_returns_tag_refs() -> None:
    """list_tags maps Tag rows to TagRef payloads, preserving row order."""
    board = _board()
    session = _FakeSession(
        tags=[
            Tag(
                id=uuid4(),
                organization_id=board.organization_id,
                name="Backend",
                slug="backend",
                color="0f172a",
            ),
            Tag(
                id=uuid4(),
                organization_id=board.organization_id,
                name="Urgent",
                slug="urgent",
                color="dc2626",
            ),
        ],
    )
    # Agent is bound to the same board, so access is allowed.
    response = await agent_api.list_tags(
        board=board,
        session=session,  # type: ignore[arg-type]
        agent_ctx=_agent_ctx(board_id=board.id),
    )
    assert [tag.slug for tag in response] == ["backend", "urgent"]
    assert response[0].name == "Backend"
    assert response[1].color == "dc2626"
@pytest.mark.asyncio
async def test_list_tags_rejects_cross_board_agent() -> None:
    """An agent bound to a different board gets a 403 from list_tags."""
    board = _board()
    session = _FakeSession(tags=[])
    with pytest.raises(HTTPException) as exc:
        await agent_api.list_tags(
            board=board,
            session=session,  # type: ignore[arg-type]
            agent_ctx=_agent_ctx(board_id=uuid4()),
        )
    assert exc.value.status_code == 403

View File

@@ -68,86 +68,80 @@ def test_normalize_task_ids_dedupes_and_merges_sources() -> None:
@pytest.mark.asyncio
async def test_task_counts_for_board_supports_multi_task_links_and_legacy_rows() -> None:
engine = await _make_engine()
try:
async with await _make_session(engine) as session:
board_id, task_a, task_b, task_c = await _seed_board(session)
async with await _make_session(engine) as session:
board_id, task_a, task_b, task_c = await _seed_board(session)
approval_pending_multi = Approval(
board_id=board_id,
task_id=task_a,
action_type="task.update",
confidence=80,
status="pending",
)
approval_approved = Approval(
board_id=board_id,
task_id=task_a,
action_type="task.complete",
confidence=90,
status="approved",
)
approval_pending_two = Approval(
board_id=board_id,
task_id=task_b,
action_type="task.assign",
confidence=75,
status="pending",
)
approval_legacy = Approval(
board_id=board_id,
task_id=task_c,
action_type="task.comment",
confidence=65,
status="pending",
)
session.add(approval_pending_multi)
session.add(approval_approved)
session.add(approval_pending_two)
session.add(approval_legacy)
await session.flush()
approval_pending_multi = Approval(
board_id=board_id,
task_id=task_a,
action_type="task.update",
confidence=80,
status="pending",
)
approval_approved = Approval(
board_id=board_id,
task_id=task_a,
action_type="task.complete",
confidence=90,
status="approved",
)
approval_pending_two = Approval(
board_id=board_id,
task_id=task_b,
action_type="task.assign",
confidence=75,
status="pending",
)
approval_legacy = Approval(
board_id=board_id,
task_id=task_c,
action_type="task.comment",
confidence=65,
status="pending",
)
session.add(approval_pending_multi)
session.add(approval_approved)
session.add(approval_pending_two)
session.add(approval_legacy)
await session.flush()
session.add(
ApprovalTaskLink(approval_id=approval_pending_multi.id, task_id=task_a),
)
session.add(
ApprovalTaskLink(approval_id=approval_pending_multi.id, task_id=task_b),
)
session.add(ApprovalTaskLink(approval_id=approval_approved.id, task_id=task_a))
session.add(ApprovalTaskLink(approval_id=approval_pending_two.id, task_id=task_b))
session.add(ApprovalTaskLink(approval_id=approval_pending_two.id, task_id=task_c))
await session.commit()
session.add(
ApprovalTaskLink(approval_id=approval_pending_multi.id, task_id=task_a),
)
session.add(
ApprovalTaskLink(approval_id=approval_pending_multi.id, task_id=task_b),
)
session.add(ApprovalTaskLink(approval_id=approval_approved.id, task_id=task_a))
session.add(ApprovalTaskLink(approval_id=approval_pending_two.id, task_id=task_b))
session.add(ApprovalTaskLink(approval_id=approval_pending_two.id, task_id=task_c))
await session.commit()
counts = await task_counts_for_board(session, board_id=board_id)
counts = await task_counts_for_board(session, board_id=board_id)
assert counts[task_a] == (2, 1)
assert counts[task_b] == (2, 2)
assert counts[task_c] == (2, 2)
finally:
await engine.dispose()
assert counts[task_a] == (2, 1)
assert counts[task_b] == (2, 2)
assert counts[task_c] == (2, 2)
@pytest.mark.asyncio
async def test_load_task_ids_by_approval_preserves_insert_order() -> None:
engine = await _make_engine()
try:
async with await _make_session(engine) as session:
board_id, task_a, task_b, task_c = await _seed_board(session)
async with await _make_session(engine) as session:
board_id, task_a, task_b, task_c = await _seed_board(session)
approval = Approval(
board_id=board_id,
task_id=task_a,
action_type="task.update",
confidence=88,
status="pending",
)
session.add(approval)
await session.flush()
session.add(ApprovalTaskLink(approval_id=approval.id, task_id=task_a))
session.add(ApprovalTaskLink(approval_id=approval.id, task_id=task_b))
session.add(ApprovalTaskLink(approval_id=approval.id, task_id=task_c))
await session.commit()
approval = Approval(
board_id=board_id,
task_id=task_a,
action_type="task.update",
confidence=88,
status="pending",
)
session.add(approval)
await session.flush()
session.add(ApprovalTaskLink(approval_id=approval.id, task_id=task_a))
session.add(ApprovalTaskLink(approval_id=approval.id, task_id=task_b))
session.add(ApprovalTaskLink(approval_id=approval.id, task_id=task_c))
await session.commit()
mapping = await load_task_ids_by_approval(session, approval_ids=[approval.id])
assert mapping[approval.id] == [task_a, task_b, task_c]
finally:
await engine.dispose()
mapping = await load_task_ids_by_approval(session, approval_ids=[approval.id])
assert mapping[approval.id] == [task_a, task_b, task_c]

View File

@@ -1,182 +0,0 @@
from __future__ import annotations
from uuid import UUID, uuid4
import pytest
from fastapi import HTTPException
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import SQLModel
from sqlmodel.ext.asyncio.session import AsyncSession
from app.api import approvals as approvals_api
from app.models.boards import Board
from app.models.organizations import Organization
from app.models.tasks import Task
from app.schemas.approvals import ApprovalCreate, ApprovalUpdate
async def _make_engine() -> AsyncEngine:
engine = create_async_engine("sqlite+aiosqlite:///:memory:")
async with engine.connect() as conn, conn.begin():
await conn.run_sync(SQLModel.metadata.create_all)
return engine
async def _make_session(engine: AsyncEngine) -> AsyncSession:
return AsyncSession(engine, expire_on_commit=False)
async def _seed_board_with_tasks(
session: AsyncSession,
*,
task_count: int = 2,
) -> tuple[Board, list[UUID]]:
org_id = uuid4()
board = Board(id=uuid4(), organization_id=org_id, name="b", slug="b")
task_ids = [uuid4() for _ in range(task_count)]
session.add(Organization(id=org_id, name=f"org-{org_id}"))
session.add(board)
for task_id in task_ids:
session.add(Task(id=task_id, board_id=board.id, title=f"task-{task_id}"))
await session.commit()
return board, task_ids
@pytest.mark.asyncio
async def test_create_approval_rejects_duplicate_pending_for_same_task() -> None:
engine = await _make_engine()
try:
async with await _make_session(engine) as session:
board, task_ids = await _seed_board_with_tasks(session, task_count=1)
task_id = task_ids[0]
created = await approvals_api.create_approval(
payload=ApprovalCreate(
action_type="task.execute",
task_id=task_id,
payload={"reason": "Initial execution needs confirmation."},
confidence=80,
status="pending",
),
board=board,
session=session,
)
assert created.task_titles == [f"task-{task_id}"]
with pytest.raises(HTTPException) as exc:
await approvals_api.create_approval(
payload=ApprovalCreate(
action_type="task.retry",
task_id=task_id,
payload={"reason": "Retry should still be gated."},
confidence=77,
status="pending",
),
board=board,
session=session,
)
assert exc.value.status_code == 409
detail = exc.value.detail
assert isinstance(detail, dict)
assert detail["message"] == "Each task can have only one pending approval."
assert len(detail["conflicts"]) == 1
assert detail["conflicts"][0]["task_id"] == str(task_id)
finally:
await engine.dispose()
@pytest.mark.asyncio
async def test_create_approval_rejects_pending_conflict_from_linked_task_ids() -> None:
engine = await _make_engine()
try:
async with await _make_session(engine) as session:
board, task_ids = await _seed_board_with_tasks(session, task_count=2)
task_a, task_b = task_ids
created = await approvals_api.create_approval(
payload=ApprovalCreate(
action_type="task.batch_execute",
task_ids=[task_a, task_b],
payload={"reason": "Batch operation requires sign-off."},
confidence=85,
status="pending",
),
board=board,
session=session,
)
assert created.task_titles == [f"task-{task_a}", f"task-{task_b}"]
with pytest.raises(HTTPException) as exc:
await approvals_api.create_approval(
payload=ApprovalCreate(
action_type="task.execute",
task_id=task_b,
payload={"reason": "Single task overlaps with pending batch."},
confidence=70,
status="pending",
),
board=board,
session=session,
)
assert exc.value.status_code == 409
detail = exc.value.detail
assert isinstance(detail, dict)
assert detail["message"] == "Each task can have only one pending approval."
assert len(detail["conflicts"]) == 1
assert detail["conflicts"][0]["task_id"] == str(task_b)
finally:
await engine.dispose()
@pytest.mark.asyncio
async def test_update_approval_rejects_reopening_to_pending_with_existing_pending() -> None:
engine = await _make_engine()
try:
async with await _make_session(engine) as session:
board, task_ids = await _seed_board_with_tasks(session, task_count=1)
task_id = task_ids[0]
pending = await approvals_api.create_approval(
payload=ApprovalCreate(
action_type="task.execute",
task_id=task_id,
payload={"reason": "Primary pending approval is active."},
confidence=83,
status="pending",
),
board=board,
session=session,
)
resolved = await approvals_api.create_approval(
payload=ApprovalCreate(
action_type="task.review",
task_id=task_id,
payload={"reason": "Review decision completed earlier."},
confidence=90,
status="approved",
),
board=board,
session=session,
)
with pytest.raises(HTTPException) as exc:
await approvals_api.update_approval(
approval_id=resolved.id, # type: ignore[arg-type]
payload=ApprovalUpdate(status="pending"),
board=board,
session=session,
)
assert exc.value.status_code == 409
detail = exc.value.detail
assert isinstance(detail, dict)
assert detail["message"] == "Each task can have only one pending approval."
assert detail["conflicts"] == [
{
"task_id": str(task_id),
"approval_id": str(pending.id),
},
]
finally:
await engine.dispose()

View File

@@ -1,60 +0,0 @@
from __future__ import annotations
import pytest
from pydantic import ValidationError
from app.schemas.approvals import ApprovalCreate
def test_approval_create_requires_confidence_score() -> None:
    """Omitting ``confidence`` must fail schema validation."""
    payload = {
        "action_type": "task.update",
        "payload": {"reason": "Missing confidence should fail."},
    }
    with pytest.raises(ValidationError, match="confidence"):
        ApprovalCreate.model_validate(payload)
@pytest.mark.parametrize("confidence", [-1.0, 101.0])
def test_approval_create_rejects_out_of_range_confidence(confidence: float) -> None:
    """Confidence outside the accepted range must raise a validation error."""
    payload = {
        "action_type": "task.update",
        "payload": {"reason": "Confidence must be in range."},
        "confidence": confidence,
    }
    with pytest.raises(ValidationError, match="confidence"):
        ApprovalCreate.model_validate(payload)
def test_approval_create_requires_lead_reasoning() -> None:
    """A payload without any lead reasoning text must be rejected."""
    payload = {"action_type": "task.update", "confidence": 80}
    with pytest.raises(ValidationError, match="lead reasoning is required"):
        ApprovalCreate.model_validate(payload)
def test_approval_create_accepts_nested_decision_reason() -> None:
    """Reason text nested under ``payload.decision`` satisfies the requirement."""
    data = {
        "action_type": "task.update",
        "confidence": 80,
        "payload": {"decision": {"reason": "Needs manual approval."}},
    }
    model = ApprovalCreate.model_validate(data)
    assert model.payload == {"decision": {"reason": "Needs manual approval."}}
def test_approval_create_accepts_float_confidence() -> None:
    """Fractional confidence values round-trip without truncation."""
    data = {
        "action_type": "task.update",
        "confidence": 88.75,
        "payload": {"reason": "Fractional confidence should be preserved."},
    }
    model = ApprovalCreate.model_validate(data)
    assert model.confidence == 88.75

View File

@@ -1,87 +0,0 @@
# ruff: noqa: INP001
"""Unit-level API tests for /auth bootstrap endpoint.
These tests intentionally override auth dependencies to avoid DB wiring and
focus on route-handler behavior (response shape + auth gating).
"""
from __future__ import annotations
import pytest
from fastapi import APIRouter, FastAPI
from httpx import ASGITransport, AsyncClient
from app.api.auth import router as auth_router
from app.core.auth import AuthContext, get_auth_context
from app.models.users import User
def _build_test_app(*, auth_ctx: AuthContext) -> FastAPI:
    """Build a minimal app mounting the auth router with a canned auth context."""
    app = FastAPI()
    api_v1 = APIRouter(prefix="/api/v1")
    api_v1.include_router(auth_router)
    app.include_router(api_v1)
    async def _override_get_auth_context() -> AuthContext:
        # Dependency override: bypass real token verification entirely.
        return auth_ctx
    app.dependency_overrides[get_auth_context] = _override_get_auth_context
    return app
async def _get(client: AsyncClient, path: str) -> tuple[int, dict]:
    """Request ``path`` and return (status_code, parsed JSON or empty dict).

    NOTE(review): despite the name, this issues a POST — the bootstrap
    endpoint is a POST route. Consider renaming to ``_post``.
    """
    resp = await client.post(path)
    payload = resp.json() if resp.content else {}
    return resp.status_code, payload
@pytest.mark.asyncio
async def test_auth_bootstrap_returns_user_profile_when_authenticated() -> None:
    """Authenticated user context yields 200 with the user's profile fields."""
    user = User(clerk_user_id="user_123", email="user@example.com", name="User")
    app = _build_test_app(auth_ctx=AuthContext(actor_type="user", user=user))
    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://testserver",
    ) as client:
        status, payload = await _get(client, "/api/v1/auth/bootstrap")
    assert status == 200
    assert payload["clerk_user_id"] == "user_123"
    assert payload["email"] == "user@example.com"
    assert payload["name"] == "User"
    assert payload["id"]
@pytest.mark.asyncio
async def test_auth_bootstrap_rejects_requests_without_user_context() -> None:
    """A user-typed context with no user object is rejected with 401."""
    app = _build_test_app(auth_ctx=AuthContext(actor_type="user", user=None))
    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://testserver",
    ) as client:
        status, payload = await _get(client, "/api/v1/auth/bootstrap")
    assert status == 401
    assert payload == {"detail": "Unauthorized"}
@pytest.mark.asyncio
async def test_auth_bootstrap_rejects_non_user_actor_type() -> None:
    """Non-user actors (e.g. agents) are rejected with 401."""
    # Runtime behavior: handler checks `auth.actor_type != "user"`.
    # Use a duck-typed object to simulate a non-user actor.
    from types import SimpleNamespace
    app = _build_test_app(
        auth_ctx=SimpleNamespace(actor_type="agent", user=None),  # type: ignore[arg-type]
    )
    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://testserver",
    ) as client:
        status, payload = await _get(client, "/api/v1/auth/bootstrap")
    assert status == 401
    assert payload == {"detail": "Unauthorized"}

View File

@@ -1,175 +0,0 @@
# ruff: noqa: INP001, S101
"""Tests for board onboarding start-session restart behavior."""
from __future__ import annotations
from dataclasses import dataclass, field
from types import SimpleNamespace
from typing import Any
from uuid import uuid4
import pytest
from app.api import board_onboarding
from app.core.time import utcnow
from app.models.board_onboarding import BoardOnboardingSession
from app.schemas.board_onboarding import BoardOnboardingStart
@dataclass
class _FakeScalarResult:
value: object | None
def first(self) -> object | None:
return self.value
@dataclass
class _FakeSession:
first_value: object | None
added: list[object] = field(default_factory=list)
committed: int = 0
refreshed: list[object] = field(default_factory=list)
async def exec(self, _statement: object) -> _FakeScalarResult:
return _FakeScalarResult(self.first_value)
def add(self, value: object) -> None:
self.added.append(value)
async def commit(self) -> None:
self.committed += 1
async def refresh(self, value: object) -> None:
self.refreshed.append(value)
@pytest.mark.asyncio
async def test_start_onboarding_redispatches_when_last_message_is_user(
monkeypatch: pytest.MonkeyPatch,
) -> None:
board_id = uuid4()
onboarding = BoardOnboardingSession(
board_id=board_id,
session_key="session-key",
status="active",
messages=[
{
"role": "user",
"content": "I prefer concise updates.",
"timestamp": utcnow().isoformat(),
},
],
)
session: Any = _FakeSession(first_value=onboarding)
board = SimpleNamespace(id=board_id, name="Roadmap", description="Build v1")
captured_calls: list[dict[str, object]] = []
class _FakeMessagingService:
def __init__(self, _session: object) -> None:
self._session = _session
async def dispatch_answer(
self,
*,
board: object,
onboarding: object,
answer_text: str,
correlation_id: str,
) -> None:
captured_calls.append(
{
"board": board,
"onboarding": onboarding,
"answer_text": answer_text,
"correlation_id": correlation_id,
},
)
monkeypatch.setattr(
board_onboarding,
"BoardOnboardingMessagingService",
_FakeMessagingService,
)
before = onboarding.updated_at
result = await board_onboarding.start_onboarding(
_payload=BoardOnboardingStart(),
board=board,
session=session,
)
assert result is onboarding
assert len(captured_calls) == 1
assert captured_calls[0]["answer_text"] == "I prefer concise updates."
assert str(captured_calls[0]["correlation_id"]).startswith("onboarding.resume:")
assert onboarding.updated_at >= before
assert session.added == [onboarding]
assert session.committed == 1
assert session.refreshed == [onboarding]
@pytest.mark.asyncio
async def test_start_onboarding_does_not_redispatch_when_waiting_for_user(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """No re-dispatch (and no session writes) when the latest message is an assistant question."""
    board_id = uuid4()
    history = [
        {
            "role": "user",
            "content": "I prefer concise updates.",
            "timestamp": utcnow().isoformat(),
        },
        {
            "role": "assistant",
            "content": '{"question":"What is your timezone?","options":[{"id":"1","label":"UTC"}]}',
            "timestamp": utcnow().isoformat(),
        },
    ]
    onboarding = BoardOnboardingSession(
        board_id=board_id,
        session_key="session-key",
        status="active",
        messages=history,
    )
    session: Any = _FakeSession(first_value=onboarding)
    board = SimpleNamespace(id=board_id, name="Roadmap", description="Build v1")

    dispatched: list[dict[str, object]] = []

    class _RecordingMessagingService:
        def __init__(self, _session: object) -> None:
            self._session = _session

        async def dispatch_answer(
            self,
            *,
            board: object,
            onboarding: object,
            answer_text: str,
            correlation_id: str,
        ) -> None:
            # Any call landing here means the endpoint dispatched unexpectedly.
            dispatched.append(
                {
                    "board": board,
                    "onboarding": onboarding,
                    "answer_text": answer_text,
                    "correlation_id": correlation_id,
                },
            )

    monkeypatch.setattr(
        board_onboarding,
        "BoardOnboardingMessagingService",
        _RecordingMessagingService,
    )

    result = await board_onboarding.start_onboarding(
        _payload=BoardOnboardingStart(),
        board=board,
        session=session,
    )

    assert result is onboarding
    # The session is waiting on the user, so nothing is dispatched or written.
    assert dispatched == []
    assert session.added == []
    assert session.committed == 0
    assert session.refreshed == []

View File

@@ -14,7 +14,6 @@ from app.core.error_handling import (
_error_payload,
_get_request_id,
_http_exception_exception_handler,
_json_safe,
_request_validation_exception_handler,
_response_validation_exception_handler,
install_error_handling,
@@ -39,31 +38,6 @@ def test_request_validation_error_includes_request_id():
assert resp.headers.get(REQUEST_ID_HEADER) == body["request_id"]
def test_request_validation_error_handles_bytes_input_without_500():
class Payload(BaseModel):
content: str
app = FastAPI()
install_error_handling(app)
@app.put("/needs-object")
def needs_object(payload: Payload) -> dict[str, str]:
return {"content": payload.content}
client = TestClient(app, raise_server_exceptions=False)
resp = client.put(
"/needs-object",
content=b"plain-text-body",
headers={"content-type": "text/plain"},
)
assert resp.status_code == 422
body = resp.json()
assert isinstance(body.get("detail"), list)
assert isinstance(body.get("request_id"), str) and body["request_id"]
assert resp.headers.get(REQUEST_ID_HEADER) == body["request_id"]
def test_http_exception_includes_request_id():
app = FastAPI()
install_error_handling(app)
@@ -210,23 +184,6 @@ def test_error_payload_omits_request_id_when_none() -> None:
assert _error_payload(detail="x", request_id=None) == {"detail": "x"}
def test_json_safe_decodes_bytes() -> None:
assert _json_safe(b"abc") == "abc"
def test_json_safe_decodes_bytearray_and_memoryview() -> None:
assert _json_safe(bytearray(b"abc")) == "abc"
assert _json_safe(memoryview(b"abc")) == "abc"
def test_json_safe_falls_back_to_str() -> None:
class Thing:
def __str__(self) -> str:
return "thing"
assert _json_safe(Thing()) == "thing"
@pytest.mark.asyncio
async def test_request_validation_exception_wrapper_rejects_wrong_exception() -> None:
req = Request({"type": "http", "headers": [], "state": {}})

View File

@@ -1,87 +0,0 @@
from __future__ import annotations
from datetime import datetime, timedelta
import pytest
from app.api import metrics as metrics_api
from app.schemas.metrics import DashboardRangeKey
@pytest.mark.parametrize(
("range_key", "expected_bucket", "expected_duration"),
[
("24h", "hour", timedelta(hours=24)),
("3d", "day", timedelta(days=3)),
("7d", "day", timedelta(days=7)),
("14d", "day", timedelta(days=14)),
("1m", "day", timedelta(days=30)),
("3m", "week", timedelta(days=90)),
("6m", "week", timedelta(days=180)),
("1y", "month", timedelta(days=365)),
],
)
def test_resolve_range_maps_expected_window(
monkeypatch: pytest.MonkeyPatch,
range_key: DashboardRangeKey,
expected_bucket: str,
expected_duration: timedelta,
) -> None:
fixed_now = datetime(2026, 2, 12, 15, 30, 0)
monkeypatch.setattr(metrics_api, "utcnow", lambda: fixed_now)
spec = metrics_api._resolve_range(range_key)
assert spec.key == range_key
assert spec.bucket == expected_bucket
assert spec.duration == expected_duration
assert spec.start == fixed_now - expected_duration
assert spec.end == fixed_now
def test_comparison_range_is_previous_window(monkeypatch: pytest.MonkeyPatch) -> None:
fixed_now = datetime(2026, 2, 12, 15, 30, 0)
monkeypatch.setattr(metrics_api, "utcnow", lambda: fixed_now)
primary = metrics_api._resolve_range("14d")
comparison = metrics_api._comparison_range(primary)
assert comparison.key == primary.key
assert comparison.bucket == primary.bucket
assert comparison.duration == primary.duration
assert comparison.start == primary.start - primary.duration
assert comparison.end == primary.end - primary.duration
def test_week_buckets_align_to_monday(monkeypatch: pytest.MonkeyPatch) -> None:
fixed_now = datetime(2026, 2, 12, 15, 30, 0)
monkeypatch.setattr(metrics_api, "utcnow", lambda: fixed_now)
spec = metrics_api._resolve_range("3m")
buckets = metrics_api._build_buckets(spec)
assert buckets
assert all(bucket.weekday() == 0 for bucket in buckets)
assert all(
buckets[index + 1] - buckets[index] == timedelta(days=7)
for index in range(len(buckets) - 1)
)
def test_month_buckets_align_to_first_of_month(monkeypatch: pytest.MonkeyPatch) -> None:
fixed_now = datetime(2026, 2, 12, 15, 30, 0)
monkeypatch.setattr(metrics_api, "utcnow", lambda: fixed_now)
spec = metrics_api._resolve_range("1y")
buckets = metrics_api._build_buckets(spec)
assert buckets
assert all(
bucket.day == 1
and bucket.hour == 0
and bucket.minute == 0
and bucket.second == 0
and bucket.microsecond == 0
for bucket in buckets
)
assert len(buckets) >= 12

View File

@@ -1,128 +0,0 @@
# ruff: noqa
from __future__ import annotations
from dataclasses import dataclass, field
from uuid import uuid4
import pytest
from app.models.tags import Tag
from app.services import tags
@dataclass
class _FakeSession:
exec_results: list[object]
executed: list[object] = field(default_factory=list)
added: list[object] = field(default_factory=list)
async def exec(self, query):
self.executed.append(query)
if not self.exec_results:
raise AssertionError("No more exec_results left for session.exec")
return self.exec_results.pop(0)
def add(self, value):
self.added.append(value)
def test_slugify_tag_normalizes_text():
assert tags.slugify_tag("Release / QA") == "release-qa"
assert tags.slugify_tag(" ### ") == "tag"
@pytest.mark.asyncio
async def test_validate_tag_ids_dedupes_and_preserves_order():
org_id = uuid4()
tag_a = uuid4()
tag_b = uuid4()
session = _FakeSession(exec_results=[{tag_a, tag_b}])
result = await tags.validate_tag_ids(
session,
organization_id=org_id,
tag_ids=[tag_a, tag_b, tag_a],
)
assert result == [tag_a, tag_b]
@pytest.mark.asyncio
async def test_validate_tag_ids_rejects_missing_tags():
org_id = uuid4()
tag_a = uuid4()
missing = uuid4()
session = _FakeSession(exec_results=[{tag_a}])
with pytest.raises(tags.HTTPException) as exc:
await tags.validate_tag_ids(
session,
organization_id=org_id,
tag_ids=[tag_a, missing],
)
assert exc.value.status_code == 404
assert exc.value.detail["missing_tag_ids"] == [str(missing)]
@pytest.mark.asyncio
async def test_load_tag_state_groups_rows_by_task_id():
task_a = uuid4()
task_b = uuid4()
tag_a = uuid4()
tag_b = uuid4()
session = _FakeSession(
exec_results=[
[
(
task_a,
Tag(
id=tag_a,
organization_id=uuid4(),
name="Backend",
slug="backend",
color="0f172a",
),
),
(
task_a,
Tag(
id=tag_b,
organization_id=uuid4(),
name="Urgent",
slug="urgent",
color="dc2626",
),
),
(
task_b,
Tag(
id=tag_b,
organization_id=uuid4(),
name="Urgent",
slug="urgent",
color="dc2626",
),
),
],
],
)
state = await tags.load_tag_state(
session,
task_ids=[task_a, task_b],
)
assert state[task_a].tag_ids == [tag_a, tag_b]
assert [tag.name for tag in state[task_a].tags] == ["Backend", "Urgent"]
assert state[task_b].tag_ids == [tag_b]
@pytest.mark.asyncio
async def test_replace_tags_replaces_existing_links():
task_id = uuid4()
tag_a = uuid4()
tag_b = uuid4()
session = _FakeSession(exec_results=[None])
await tags.replace_tags(
session,
task_id=task_id,
tag_ids=[tag_a, tag_b, tag_a],
)
assert len(session.executed) == 1
assert len(session.added) == 2

View File

@@ -43,151 +43,128 @@ async def _seed_board_and_tasks(
@pytest.mark.asyncio
async def test_validate_dependency_update_rejects_self_dependency() -> None:
engine = await _make_engine()
try:
async with await _make_session(engine) as session:
board_id = uuid4()
task_id = uuid4()
await _seed_board_and_tasks(session, board_id=board_id, task_ids=[task_id])
async with await _make_session(engine) as session:
board_id = uuid4()
task_id = uuid4()
await _seed_board_and_tasks(session, board_id=board_id, task_ids=[task_id])
with pytest.raises(HTTPException) as exc:
await td.validate_dependency_update(
session,
board_id=board_id,
task_id=task_id,
depends_on_task_ids=[task_id],
)
assert exc.value.status_code == 422
finally:
await engine.dispose()
with pytest.raises(HTTPException) as exc:
await td.validate_dependency_update(
session,
board_id=board_id,
task_id=task_id,
depends_on_task_ids=[task_id],
)
assert exc.value.status_code == 422
@pytest.mark.asyncio
async def test_validate_dependency_update_404s_when_dependency_missing() -> None:
engine = await _make_engine()
try:
async with await _make_session(engine) as session:
board_id = uuid4()
task_id = uuid4()
dep_id = uuid4()
await _seed_board_and_tasks(session, board_id=board_id, task_ids=[task_id])
async with await _make_session(engine) as session:
board_id = uuid4()
task_id = uuid4()
dep_id = uuid4()
await _seed_board_and_tasks(session, board_id=board_id, task_ids=[task_id])
with pytest.raises(HTTPException) as exc:
await td.validate_dependency_update(
session,
board_id=board_id,
task_id=task_id,
depends_on_task_ids=[dep_id],
)
assert exc.value.status_code == 404
detail = exc.value.detail
assert isinstance(detail, dict)
assert detail["missing_task_ids"] == [str(dep_id)]
finally:
await engine.dispose()
with pytest.raises(HTTPException) as exc:
await td.validate_dependency_update(
session,
board_id=board_id,
task_id=task_id,
depends_on_task_ids=[dep_id],
)
assert exc.value.status_code == 404
detail = exc.value.detail
assert isinstance(detail, dict)
assert detail["missing_task_ids"] == [str(dep_id)]
@pytest.mark.asyncio
async def test_validate_dependency_update_detects_cycle() -> None:
engine = await _make_engine()
try:
async with await _make_session(engine) as session:
board_id = uuid4()
a, b = uuid4(), uuid4()
await _seed_board_and_tasks(session, board_id=board_id, task_ids=[a, b])
async with await _make_session(engine) as session:
board_id = uuid4()
a, b = uuid4(), uuid4()
await _seed_board_and_tasks(session, board_id=board_id, task_ids=[a, b])
# existing edge a -> b
session.add(TaskDependency(board_id=board_id, task_id=a, depends_on_task_id=b))
await session.commit()
# existing edge a -> b
session.add(TaskDependency(board_id=board_id, task_id=a, depends_on_task_id=b))
await session.commit()
# update b -> a introduces cycle
with pytest.raises(HTTPException) as exc:
await td.validate_dependency_update(
session,
board_id=board_id,
task_id=b,
depends_on_task_ids=[a],
)
assert exc.value.status_code == 409
finally:
await engine.dispose()
# update b -> a introduces cycle
with pytest.raises(HTTPException) as exc:
await td.validate_dependency_update(
session,
board_id=board_id,
task_id=b,
depends_on_task_ids=[a],
)
assert exc.value.status_code == 409
@pytest.mark.asyncio
async def test_dependency_queries_and_replace_and_dependents() -> None:
engine = await _make_engine()
try:
async with await _make_session(engine) as session:
board_id = uuid4()
t1, t2, t3 = uuid4(), uuid4(), uuid4()
await _seed_board_and_tasks(session, board_id=board_id, task_ids=[t1, t2, t3])
async with await _make_session(engine) as session:
board_id = uuid4()
t1, t2, t3 = uuid4(), uuid4(), uuid4()
await _seed_board_and_tasks(session, board_id=board_id, task_ids=[t1, t2, t3])
# seed deps: t1 depends on t2 then t3
session.add(TaskDependency(board_id=board_id, task_id=t1, depends_on_task_id=t2))
session.add(TaskDependency(board_id=board_id, task_id=t1, depends_on_task_id=t3))
await session.commit()
# seed deps: t1 depends on t2 then t3
session.add(TaskDependency(board_id=board_id, task_id=t1, depends_on_task_id=t2))
session.add(TaskDependency(board_id=board_id, task_id=t1, depends_on_task_id=t3))
await session.commit()
# cover empty input short-circuit
assert await td.dependency_ids_by_task_id(session, board_id=board_id, task_ids=[]) == {}
# cover empty input short-circuit
assert await td.dependency_ids_by_task_id(session, board_id=board_id, task_ids=[]) == {}
deps_map = await td.dependency_ids_by_task_id(
session, board_id=board_id, task_ids=[t1, t2]
)
assert deps_map[t1] == [t2, t3]
assert deps_map.get(t2, []) == []
deps_map = await td.dependency_ids_by_task_id(session, board_id=board_id, task_ids=[t1, t2])
assert deps_map[t1] == [t2, t3]
assert deps_map.get(t2, []) == []
# mark t2 done, t3 not
task2 = (await session.exec(select(Task).where(col(Task.id) == t2))).first()
assert task2 is not None
task2.status = td.DONE_STATUS
await session.commit()
# mark t2 done, t3 not
task2 = (await session.exec(select(Task).where(col(Task.id) == t2))).first()
assert task2 is not None
task2.status = td.DONE_STATUS
await session.commit()
# cover empty input short-circuit
assert (
await td.dependency_status_by_id(session, board_id=board_id, dependency_ids=[])
== {}
)
# cover empty input short-circuit
assert await td.dependency_status_by_id(session, board_id=board_id, dependency_ids=[]) == {}
status_map = await td.dependency_status_by_id(
session, board_id=board_id, dependency_ids=[t2, t3]
)
assert status_map[t2] == td.DONE_STATUS
assert status_map[t3] != td.DONE_STATUS
status_map = await td.dependency_status_by_id(
session, board_id=board_id, dependency_ids=[t2, t3]
)
assert status_map[t2] == td.DONE_STATUS
assert status_map[t3] != td.DONE_STATUS
blocked = await td.blocked_by_for_task(session, board_id=board_id, task_id=t1)
assert blocked == [t3]
blocked = await td.blocked_by_for_task(session, board_id=board_id, task_id=t1)
assert blocked == [t3]
# cover early return when no deps provided
assert (
await td.blocked_by_for_task(
session, board_id=board_id, task_id=t1, dependency_ids=[]
)
== []
)
# cover early return when no deps provided
assert (
await td.blocked_by_for_task(session, board_id=board_id, task_id=t1, dependency_ids=[])
== []
)
# replace deps with duplicates (deduped) -> [t3]
out = await td.replace_task_dependencies(
session,
board_id=board_id,
task_id=t1,
depends_on_task_ids=[t3, t3],
)
await session.commit()
assert out == [t3]
# replace deps with duplicates (deduped) -> [t3]
out = await td.replace_task_dependencies(
session,
board_id=board_id,
task_id=t1,
depends_on_task_ids=[t3, t3],
)
await session.commit()
assert out == [t3]
deps_map2 = await td.dependency_ids_by_task_id(
session, board_id=board_id, task_ids=[t1]
)
assert deps_map2[t1] == [t3]
deps_map2 = await td.dependency_ids_by_task_id(session, board_id=board_id, task_ids=[t1])
assert deps_map2[t1] == [t3]
dependents = await td.dependent_task_ids(
session, board_id=board_id, dependency_task_id=t3
)
assert dependents == [t1]
dependents = await td.dependent_task_ids(session, board_id=board_id, dependency_task_id=t3)
assert dependents == [t1]
# also exercise explicit dependency_ids passed
blocked2 = await td.blocked_by_for_task(
session, board_id=board_id, task_id=t1, dependency_ids=[t3]
)
assert blocked2 == [t3]
finally:
await engine.dispose()
# also exercise explicit dependency_ids passed
blocked2 = await td.blocked_by_for_task(
session, board_id=board_id, task_id=t1, dependency_ids=[t3]
)
assert blocked2 == [t3]

View File

@@ -5,7 +5,7 @@ from uuid import uuid4
import pytest
from app.api.tasks import _coerce_task_event_rows, _task_event_payload
from app.api.tasks import _coerce_task_event_rows
from app.models.activity_events import ActivityEvent
from app.models.tasks import Task
@@ -51,67 +51,3 @@ def test_coerce_task_event_rows_accepts_row_like_values():
def test_coerce_task_event_rows_rejects_invalid_values():
with pytest.raises(TypeError, match="Expected \\(ActivityEvent, Task \\| None\\) rows"):
_coerce_task_event_rows([("bad", "row")])
def test_task_event_payload_includes_activity_for_comment_event() -> None:
task = Task(board_id=uuid4(), title="Ship patch")
event = ActivityEvent(
event_type="task.comment",
message="Looks good.",
task_id=task.id,
agent_id=uuid4(),
)
payload = _task_event_payload(
event,
task,
deps_map={},
dep_status={},
tag_state_by_task_id={},
)
assert payload["type"] == "task.comment"
assert payload["activity"] == {
"id": str(event.id),
"event_type": "task.comment",
"message": "Looks good.",
"agent_id": str(event.agent_id),
"task_id": str(task.id),
"created_at": event.created_at.isoformat(),
}
comment = payload["comment"]
assert isinstance(comment, dict)
assert comment["id"] == str(event.id)
assert comment["task_id"] == str(task.id)
assert comment["message"] == "Looks good."
def test_task_event_payload_includes_activity_for_non_comment_event() -> None:
task = Task(board_id=uuid4(), title="Wire stream events", status="in_progress")
event = ActivityEvent(
event_type="task.updated",
message="Task updated: Wire stream events.",
task_id=task.id,
)
payload = _task_event_payload(
event,
task,
deps_map={},
dep_status={},
tag_state_by_task_id={},
)
assert payload["type"] == "task.updated"
assert payload["activity"] == {
"id": str(event.id),
"event_type": "task.updated",
"message": "Task updated: Wire stream events.",
"agent_id": None,
"task_id": str(task.id),
"created_at": event.created_at.isoformat(),
}
task_payload = payload["task"]
assert isinstance(task_payload, dict)
assert task_payload["id"] == str(task.id)
assert task_payload["is_blocked"] is False

View File

@@ -1,58 +0,0 @@
# Development workflow
This page describes a contributor-friendly local dev loop.
## Prereqs
- Python 3.12+
- Node 22+
- Docker + Docker Compose v2 (`docker compose`)
- `uv` (installed automatically by `make backend-sync` in CI; for local install see https://docs.astral.sh/uv/)
## Fast local loop (DB in Docker, apps on host)
1) Start Postgres (via compose)
```bash
cp .env.example .env
# Configure .env as needed (see root README for auth-mode notes)
docker compose -f compose.yml --env-file .env up -d db
```
2) Install deps
```bash
make setup
```
3) Apply DB migrations
```bash
make backend-migrate
```
4) Run backend (dev)
```bash
cd backend
uv run uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
```
5) Run frontend (dev)
In another terminal:
```bash
cd frontend
npm run dev -- --hostname 0.0.0.0 --port 3000
```
Open:
- Frontend: http://localhost:3000
- Backend health: http://localhost:8000/healthz
## Notes
- If you want the fully-containerized stack instead, see the root README's compose instructions.
- If you add new env vars, prefer documenting them in `.env.example` and linking from docs rather than pasting secrets.

View File

@@ -1,19 +0,0 @@
# Mission Control docs
This folder is the starting point for Mission Control documentation.
## Sections
- [Development workflow](./03-development.md)
- [Testing guide](./testing/README.md)
- [Coverage policy](./coverage-policy.md)
- [Deployment](./deployment/README.md)
- [Production notes](./production/README.md)
- [Troubleshooting](./troubleshooting/README.md)
- [Release checklist](./release/README.md)
- [Gateway WebSocket protocol](./openclaw_gateway_ws.md)
## Status
These pages are minimal placeholders so repo-relative links stay healthy. The actual docs
information architecture will be defined in the Docs overhaul tasks.

View File

@@ -1,3 +0,0 @@
# Coverage policy
Placeholder: coverage policy is currently documented in the root `Makefile` (`backend-coverage`).

View File

@@ -1,3 +0,0 @@
# Deployment guide
Placeholder.

View File

@@ -1,3 +0,0 @@
# Gateway WebSocket protocol
Placeholder.

View File

@@ -1,3 +0,0 @@
# Production notes
Placeholder.

View File

@@ -1,104 +0,0 @@
# Release checklist (maintainers)
This project does not yet have a fully automated release pipeline.
Use this checklist for **manual releases** and as a place to evolve the process over time.
## Before you start
- [ ] You have maintainer permissions on `abhi1693/openclaw-mission-control`
- [ ] You know which **scope** you're releasing:
- [ ] backend-only change
- [ ] frontend-only change
- [ ] infra/docs-only change
- [ ] full release
## 1) Pick the release commit
- [ ] Ensure you're on the latest `master`:
```bash
git fetch origin
git checkout master
git reset --hard origin/master
```
- [ ] Identify the commit SHA you want to release (usually `origin/master`).
## 2) Verify CI + local checks
- [ ] CI is green on the target commit.
- [ ] Run the local CI-parity checks:
```bash
make setup
make check
make docs-check
```
## 3) Smoke test (recommended)
If the change affects runtime behavior (API/UI/auth), do a quick smoke test using the compose stack.
```bash
cp .env.example .env
# Configure .env (see repo README for auth mode notes)
docker compose -f compose.yml --env-file .env up -d --build
after_up() {
echo "Frontend: http://localhost:3000"
echo "Backend health: http://localhost:8000/healthz"
}
after_up
```
- [ ] Validate basic flows (at minimum):
- [ ] frontend loads
- [ ] backend `/healthz` returns 200
- [ ] auth mode behaves as expected (local or clerk)
## 4) Prepare release notes
There is no enforced changelog format yet.
Choose one:
- [ ] GitHub Release notes (recommended for now)
- [ ] `CHANGELOG.md` (TODO: adopt Keep a Changelog)
Minimum release notes:
- [ ] Summary of key changes
- [ ] Any config changes / env var changes
- [ ] Any DB migration notes
- [ ] Known issues
## 5) Tag + publish
Tagging convention:
- Use semver tags: `vMAJOR.MINOR.PATCH` (e.g. `v1.4.0`).
- Create a GitHub Release from the tag (release notes can be manual for now).
- If the repo already has existing tags/releases, mirror the established convention exactly.
Suggested manual flow:
```bash
# Example (adjust once tag conventions are decided)
git tag -a v0.1.0 -m "Release v0.1.0"
git push origin v0.1.0
```
- [ ] Create a GitHub Release from the tag
- [ ] Paste the release notes
## 6) Post-release
- [ ] Monitor new issues for regressions
- [ ] If you changed public behavior, ensure docs are updated (README + docs pages)
## Notes / follow-ups
- Consider adding:
- automated versioning (changesets / semantic-release)
- release workflow in `.github/workflows/release.yml`
- a `CHANGELOG.md`

View File

@@ -1,57 +0,0 @@
# Testing guide
This repo's CI parity entrypoint is:
```bash
make check
```
That target runs lint + typecheck + unit tests + coverage gate + frontend build.
## Quick commands
From repo root:
```bash
make setup
# Format + lint + typecheck + tests + build (CI parity)
make check
```
Docs-only quality gates:
```bash
make docs-check
```
## Backend
```bash
make backend-lint
make backend-typecheck
make backend-test
make backend-coverage
```
## Frontend
```bash
make frontend-lint
make frontend-typecheck
make frontend-test
make frontend-build
```
## E2E (Cypress)
CI runs Cypress E2E in `.github/workflows/ci.yml`.
Locally, the frontend package.json provides the E2E script:
```bash
cd frontend
npm run e2e
```
If your E2E tests require auth, ensure you have the necessary env configured (see root README for auth mode). Do **not** paste tokens into docs.

View File

@@ -1,3 +0,0 @@
# Troubleshooting
Placeholder.

View File

@@ -14,13 +14,6 @@ export default defineConfig({
baseUrl: "http://localhost:3000",
specPattern: "cypress/e2e/**/*.cy.{js,jsx,ts,tsx}",
supportFile: "cypress/support/e2e.ts",
// Clerk helpers perform async work inside `cy.then()`. CI can be slow enough
// that Cypress' 4s default command timeout flakes.
defaultCommandTimeout: 20_000,
retries: {
runMode: 2,
openMode: 0,
},
setupNodeEvents(on, config) {
return clerkSetup({ config });
},

View File

@@ -4,14 +4,30 @@ describe("/activity feed", () => {
const apiBase = "**/api/v1";
const email = Cypress.env("CLERK_TEST_EMAIL") || "jane+clerk_test@example.com";
function stubSseEmpty(pathGlob: string, alias: string) {
cy.intercept("GET", pathGlob, {
statusCode: 200,
headers: {
"content-type": "text/event-stream",
const originalDefaultCommandTimeout = Cypress.config("defaultCommandTimeout");
beforeEach(() => {
// Clerk's Cypress helpers perform async work inside `cy.then()`.
// CI can be slow enough that the default 4s command timeout flakes.
Cypress.config("defaultCommandTimeout", 20_000);
});
afterEach(() => {
Cypress.config("defaultCommandTimeout", originalDefaultCommandTimeout);
});
function stubStreamEmpty() {
cy.intercept(
"GET",
`${apiBase}/activity/task-comments/stream*`,
{
statusCode: 200,
headers: {
"content-type": "text/event-stream",
},
body: "",
},
body: "",
}).as(alias);
).as("activityStream");
}
function assertSignedInAndLanded() {
@@ -19,52 +35,35 @@ describe("/activity feed", () => {
cy.contains(/live feed/i).should("be.visible");
}
it("auth negative: signed-out user is redirected to sign-in", () => {
// SignedOutPanel runs in redirect mode on this page.
it("auth negative: signed-out user cannot access /activity", () => {
// Story: signed-out user tries to visit /activity and is redirected to sign-in.
cy.visit("/activity");
cy.location("pathname", { timeout: 20_000 }).should("match", /\/sign-in/);
});
it("happy path: renders feed items from the activity endpoint", () => {
cy.intercept("GET", `${apiBase}/boards*`, {
statusCode: 200,
body: {
items: [{ id: "b1", name: "Testing", updated_at: "2026-02-07T00:00:00Z" }],
},
}).as("boardsList");
cy.intercept("GET", `${apiBase}/boards/b1/snapshot*`, {
statusCode: 200,
body: {
tasks: [{ id: "t1", title: "CI hardening" }],
agents: [],
approvals: [],
chat_messages: [],
},
}).as("boardSnapshot");
cy.intercept("GET", `${apiBase}/activity*`, {
it("happy path: renders task comment cards", () => {
cy.intercept("GET", `${apiBase}/activity/task-comments*`, {
statusCode: 200,
body: {
items: [
{
id: "evt-1",
created_at: "2026-02-07T00:00:00Z",
event_type: "task.comment",
id: "c1",
message: "Hello world",
agent_id: null,
agent_name: "Kunal",
agent_role: "QA 2",
board_id: "b1",
board_name: "Testing",
task_id: "t1",
task_title: "CI hardening",
created_at: "2026-02-07T00:00:00Z",
},
],
},
}).as("activityList");
// Prevent SSE connections from hanging the test.
stubSseEmpty(`${apiBase}/boards/b1/tasks/stream*`, "tasksStream");
stubSseEmpty(`${apiBase}/boards/b1/approvals/stream*`, "approvalsStream");
stubSseEmpty(`${apiBase}/boards/b1/memory/stream*`, "memoryStream");
stubSseEmpty(`${apiBase}/agents/stream*`, "agentsStream");
stubStreamEmpty();
// Story: user signs in, then visits /activity and sees the live feed.
cy.visit("/sign-in");
cy.clerkLoaded();
cy.clerkSignIn({ strategy: "email_code", identifier: email });
@@ -77,18 +76,14 @@ describe("/activity feed", () => {
});
it("empty state: shows waiting message when no items", () => {
cy.intercept("GET", `${apiBase}/boards*`, {
statusCode: 200,
body: { items: [] },
}).as("boardsList");
cy.intercept("GET", `${apiBase}/activity*`, {
cy.intercept("GET", `${apiBase}/activity/task-comments*`, {
statusCode: 200,
body: { items: [] },
}).as("activityList");
stubSseEmpty(`${apiBase}/agents/stream*`, "agentsStream");
stubStreamEmpty();
// Story: user signs in, then visits /activity and sees an empty-state message.
cy.visit("/sign-in");
cy.clerkLoaded();
cy.clerkSignIn({ strategy: "email_code", identifier: email });
@@ -96,22 +91,18 @@ describe("/activity feed", () => {
cy.visit("/activity");
assertSignedInAndLanded();
cy.contains(/waiting for new activity/i).should("be.visible");
cy.contains(/waiting for new comments/i).should("be.visible");
});
it("error state: shows failure UI when API errors", () => {
cy.intercept("GET", `${apiBase}/boards*`, {
statusCode: 200,
body: { items: [] },
}).as("boardsList");
cy.intercept("GET", `${apiBase}/activity*`, {
cy.intercept("GET", `${apiBase}/activity/task-comments*`, {
statusCode: 500,
body: { detail: "boom" },
}).as("activityList");
stubSseEmpty(`${apiBase}/agents/stream*`, "agentsStream");
stubStreamEmpty();
// Story: user signs in, then visits /activity; API fails and user sees an error.
cy.visit("/sign-in");
cy.clerkLoaded();
cy.clerkSignIn({ strategy: "email_code", identifier: email });
@@ -119,6 +110,6 @@ describe("/activity feed", () => {
cy.visit("/activity");
assertSignedInAndLanded();
cy.contains(/unable to load activity feed|boom/i).should("be.visible");
cy.contains(/unable to load feed|boom/i).should("be.visible");
});
});

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0
@@ -54,7 +54,6 @@ import type {
ListTasksApiV1AgentBoardsBoardIdTasksGetParams,
OkResponse,
SoulUpdateRequest,
TagRef,
TaskCommentCreate,
TaskCommentRead,
TaskCreate,
@@ -1169,215 +1168,6 @@ export const useCreateTaskApiV1AgentBoardsBoardIdTasksPost = <
queryClient,
);
};
/**
* List tags available to the board's organization.
* @summary List Tags
*/
export type listTagsApiV1AgentBoardsBoardIdTagsGetResponse200 = {
data: TagRef[];
status: 200;
};
export type listTagsApiV1AgentBoardsBoardIdTagsGetResponse422 = {
data: HTTPValidationError;
status: 422;
};
export type listTagsApiV1AgentBoardsBoardIdTagsGetResponseSuccess =
listTagsApiV1AgentBoardsBoardIdTagsGetResponse200 & {
headers: Headers;
};
export type listTagsApiV1AgentBoardsBoardIdTagsGetResponseError =
listTagsApiV1AgentBoardsBoardIdTagsGetResponse422 & {
headers: Headers;
};
export type listTagsApiV1AgentBoardsBoardIdTagsGetResponse =
| listTagsApiV1AgentBoardsBoardIdTagsGetResponseSuccess
| listTagsApiV1AgentBoardsBoardIdTagsGetResponseError;
export const getListTagsApiV1AgentBoardsBoardIdTagsGetUrl = (
boardId: string,
) => {
return `/api/v1/agent/boards/${boardId}/tags`;
};
export const listTagsApiV1AgentBoardsBoardIdTagsGet = async (
boardId: string,
options?: RequestInit,
): Promise<listTagsApiV1AgentBoardsBoardIdTagsGetResponse> => {
return customFetch<listTagsApiV1AgentBoardsBoardIdTagsGetResponse>(
getListTagsApiV1AgentBoardsBoardIdTagsGetUrl(boardId),
{
...options,
method: "GET",
},
);
};
export const getListTagsApiV1AgentBoardsBoardIdTagsGetQueryKey = (
boardId: string,
) => {
return [`/api/v1/agent/boards/${boardId}/tags`] as const;
};
export const getListTagsApiV1AgentBoardsBoardIdTagsGetQueryOptions = <
TData = Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
TError = HTTPValidationError,
>(
boardId: string,
options?: {
query?: Partial<
UseQueryOptions<
Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
TError,
TData
>
>;
request?: SecondParameter<typeof customFetch>;
},
) => {
const { query: queryOptions, request: requestOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ??
getListTagsApiV1AgentBoardsBoardIdTagsGetQueryKey(boardId);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>
> = ({ signal }) =>
listTagsApiV1AgentBoardsBoardIdTagsGet(boardId, {
signal,
...requestOptions,
});
return {
queryKey,
queryFn,
enabled: !!boardId,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
TError,
TData
> & { queryKey: DataTag<QueryKey, TData, TError> };
};
export type ListTagsApiV1AgentBoardsBoardIdTagsGetQueryResult = NonNullable<
Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>
>;
export type ListTagsApiV1AgentBoardsBoardIdTagsGetQueryError =
HTTPValidationError;
// Overload: caller supplies `initialData`, so the hook returns a
// DefinedUseQueryResult whose `data` is never undefined.
export function useListTagsApiV1AgentBoardsBoardIdTagsGet<
  TData = Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
  TError = HTTPValidationError,
>(
  boardId: string,
  options: {
    query: Partial<
      UseQueryOptions<
        Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
        TError,
        TData
      >
    > &
      Pick<
        DefinedInitialDataOptions<
          Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
          TError,
          Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>
        >,
        "initialData"
      >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): DefinedUseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
};
// Overload: no `initialData` supplied — `data` may be undefined until the
// first fetch resolves.
export function useListTagsApiV1AgentBoardsBoardIdTagsGet<
  TData = Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
        TError,
        TData
      >
    > &
      Pick<
        UndefinedInitialDataOptions<
          Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
          TError,
          Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>
        >,
        "initialData"
      >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
};
// Overload: general form accepting arbitrary partial query options.
export function useListTagsApiV1AgentBoardsBoardIdTagsGet<
  TData = Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
        TError,
        TData
      >
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
};
/**
 * React Query hook for GET /api/v1/agent/boards/{boardId}/tags.
 * The resolved query key is returned alongside the query result so callers
 * can invalidate or prefetch with the exact key the hook used.
 * @summary List Tags
 */
export function useListTagsApiV1AgentBoardsBoardIdTagsGet<
  TData = Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<ReturnType<typeof listTagsApiV1AgentBoardsBoardIdTagsGet>>,
        TError,
        TData
      >
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
} {
  const builtOptions = getListTagsApiV1AgentBoardsBoardIdTagsGetQueryOptions(
    boardId,
    options,
  );
  const result = useQuery(builtOptions, queryClient) as UseQueryResult<
    TData,
    TError
  > & { queryKey: DataTag<QueryKey, TData, TError> };
  return { ...result, queryKey: builtOptions.queryKey };
}
/**
* Update a task after board-level access checks.
* @summary Update Task
@@ -3707,162 +3497,6 @@ export const useUpdateAgentSoulApiV1AgentBoardsBoardIdAgentsAgentIdSoulPut = <
queryClient,
);
};
/**
 * Delete a board agent as the board lead.
 * @summary Delete Board Agent
 */
// Success envelope: HTTP 200 with an OkResponse payload.
export type deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponse200 =
  {
    data: OkResponse;
    status: 200;
  };
// Error envelope: HTTP 422 with validation error details.
export type deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponse422 =
  {
    data: HTTPValidationError;
    status: 422;
  };
// Success variant additionally exposes the raw response headers.
export type deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponseSuccess =
  deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponse200 & {
    headers: Headers;
  };
// Error variant additionally exposes the raw response headers.
export type deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponseError =
  deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponse422 & {
    headers: Headers;
  };
// Union of the success (200) and validation-error (422) envelopes.
export type deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponse =
  | deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponseSuccess
  | deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponseError;
/**
 * Build the request path for deleting a specific agent from a board.
 * Neither identifier is URL-encoded.
 * @returns `/api/v1/agent/boards/{boardId}/agents/{agentId}`
 */
export const getDeleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteUrl =
  (boardId: string, agentId: string) => {
    const boardSegment = "/api/v1/agent/boards/" + boardId;
    return boardSegment + "/agents/" + agentId;
  };
/**
 * Issue DELETE /api/v1/agent/boards/{boardId}/agents/{agentId}.
 * @param options - Extra fetch options merged into the request; the HTTP
 *   method is always forced to DELETE.
 */
export const deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete =
  async (
    boardId: string,
    agentId: string,
    options?: RequestInit,
  ): Promise<deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponse> => {
    const url = getDeleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteUrl(
      boardId,
      agentId,
    );
    const init: RequestInit = { ...options, method: "DELETE" };
    return customFetch<deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteResponse>(
      url,
      init,
    );
  };
/**
 * Build UseMutationOptions for the delete-board-agent mutation, merging
 * caller-supplied mutation options with a generated default mutation key
 * and wiring the positional fetcher into React Query's variables object.
 */
export const getDeleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteMutationOptions =
  <TError = HTTPValidationError, TContext = unknown>(options?: {
    mutation?: UseMutationOptions<
      Awaited<
        ReturnType<
          typeof deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete
        >
      >,
      TError,
      { boardId: string; agentId: string },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  }): UseMutationOptions<
    Awaited<
      ReturnType<
        typeof deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete
      >
    >,
    TError,
    { boardId: string; agentId: string },
    TContext
  > => {
    // Default cache key used when the caller does not supply their own.
    const mutationKey = [
      "deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete",
    ];
    // Keep the caller's options untouched when they already carry a truthy
    // mutationKey; otherwise inject the default key into a shallow copy.
    const { mutation: mutationOptions, request: requestOptions } = options
      ? options.mutation &&
        "mutationKey" in options.mutation &&
        options.mutation.mutationKey
        ? options
        : { ...options, mutation: { ...options.mutation, mutationKey } }
      : { mutation: { mutationKey }, request: undefined };
    // Adapter: unpack the variables object into the positional fetcher call.
    const mutationFn: MutationFunction<
      Awaited<
        ReturnType<
          typeof deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete
        >
      >,
      { boardId: string; agentId: string }
    > = (props) => {
      const { boardId, agentId } = props ?? {};
      return deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete(
        boardId,
        agentId,
        requestOptions,
      );
    };
    return { mutationFn, ...mutationOptions };
  };
// Resolved (non-null) result type of the delete-board-agent call.
export type DeleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteMutationResult =
  NonNullable<
    Awaited<
      ReturnType<
        typeof deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete
      >
    >
  >;
// Error type surfaced by the delete-board-agent mutation (HTTP 422).
export type DeleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteMutationError =
  HTTPValidationError;
/**
 * React Query mutation hook for removing an agent from a board.
 * Variables are `{ boardId, agentId }`; the request resolves to an
 * OkResponse (200) or an HTTPValidationError (422) envelope.
 * @summary Delete Board Agent
 */
export const useDeleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete = <
  TError = HTTPValidationError,
  TContext = unknown,
>(
  options?: {
    mutation?: UseMutationOptions<
      Awaited<
        ReturnType<
          typeof deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete
        >
      >,
      TError,
      { boardId: string; agentId: string },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseMutationResult<
  Awaited<
    ReturnType<
      typeof deleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDelete
    >
  >,
  TError,
  { boardId: string; agentId: string },
  TContext
> => {
  const mutationOptions =
    getDeleteBoardAgentApiV1AgentBoardsBoardIdAgentsAgentIdDeleteMutationOptions(
      options,
    );
  return useMutation(mutationOptions, queryClient);
};
/**
* Route a lead's ask-user request through the dedicated gateway agent.
* @summary Ask User Via Gateway Main

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -0,0 +1,10 @@
/**
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0
*/
export interface AgentDeleteConfirm {
token: string;
}

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -0,0 +1,11 @@
/**
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0
*/
export interface AgentProvisionConfirm {
token: string;
action?: string | null;
}

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0
@@ -14,7 +14,6 @@ import type { ApprovalCreateStatus } from "./approvalCreateStatus";
export interface ApprovalCreate {
action_type: string;
task_id?: string | null;
task_ids?: string[];
payload?: ApprovalCreatePayload;
confidence: number;
rubric_scores?: ApprovalCreateRubricScores;

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0
@@ -14,7 +14,6 @@ import type { ApprovalReadStatus } from "./approvalReadStatus";
export interface ApprovalRead {
action_type: string;
task_id?: string | null;
task_ids?: string[];
payload?: ApprovalReadPayload;
confidence: number;
rubric_scores?: ApprovalReadRubricScores;

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

View File

@@ -1,5 +1,5 @@
/**
* Generated by orval v8.3.0 🍺
* Generated by orval v8.2.0 🍺
* Do not edit manually.
* Mission Control API
* OpenAPI spec version: 0.1.0

Some files were not shown because too many files have changed in this diff Show More