39 Commits

Author SHA1 Message Date
Abhimanyu Saharan
bb66190913 Suppress routine GitHub CI webhook deliveries 2026-02-14 14:29:15 +00:00
Abhimanyu Saharan
4557fcc8ae test: add webhook dispatch worker coverage for process loop 2026-02-14 06:38:19 +00:00
Abhimanyu Saharan
554b73184a chore: decouple webhook-worker from backend service dependency 2026-02-14 06:36:46 +00:00
Abhimanyu Saharan
912387bd1c chore: centralize webhook scheduler interval config 2026-02-14 06:36:09 +00:00
Abhimanyu Saharan
d01365abfb fix: resolve mypy typing issues in webhook queue dispatch 2026-02-14 06:33:35 +00:00
Abhimanyu Saharan
e535f377ff fix: remove unused UUID import in webhook dispatch test 2026-02-14 06:30:11 +00:00
Abhimanyu Saharan
b987db58b8 feat: add RQ-based webhook dispatch queue and delayed worker 2026-02-14 06:23:55 +00:00
Abhimanyu Saharan
42a41f64bc migrations: update down_revision for activity_events event_type index 2026-02-14 11:30:49 +05:30
Abhimanyu Saharan
db510a8612 Merge pull request #127 from abhi1693/fix/merge-alembic-heads-2766f27
Fix installer CI: merge Alembic heads to unblock DB_AUTO_MIGRATE
2026-02-14 03:21:27 +05:30
Abhimanyu Saharan
24eaa5df54 migrations: merge alembic heads (installer CI unblock) 2026-02-13 21:43:08 +00:00
Abhimanyu Saharan
2766f271f0 Merge pull request #113 from abhi1693/perf/activity-events-eventtype-createdat
perf(db): index activity_events by (event_type, created_at)
2026-02-14 02:55:37 +05:30
Abhimanyu Saharan
a3ef1c1c07 Merge pull request #126 from abhi1693/fix/blocked-transition-409
Fix blocked task transitions: return 409 (no silent reset)
2026-02-14 02:12:32 +05:30
Abhimanyu Saharan
5ccdfc7a60 test: adapt blocked lead transition tests to custom_field_values 2026-02-13 20:20:02 +00:00
Abhimanyu Saharan
bf4ccc8a81 ci: rerun checks 2026-02-13 20:19:27 +00:00
Abhimanyu Saharan
f19f3106e3 chore(tests): fix lint (remove unused import) 2026-02-13 20:19:27 +00:00
Abhimanyu Saharan
8c10cef90f fix(backend): reject lead updates on blocked tasks (409)
- Remove silent blocked-task reset in _apply_lead_task_update
- Include stable error code in blocked task payload
- Add regression tests asserting 409 + DB unchanged
2026-02-13 20:19:27 +00:00
Abhimanyu Saharan
52f0ca79e0 feat: add conditional execution for migration integrity gate on pull requests 2026-02-14 00:28:52 +05:30
Abhimanyu Saharan
e0aef61fb5 Merge pull request #125 from abhi1693/installer
installer: merge portability scaffold into master
2026-02-14 00:20:17 +05:30
Abhimanyu Saharan
c772063280 Merge branch 'master' into installer 2026-02-14 00:20:07 +05:30
Abhimanyu Saharan
07a86d4604 feat: enhance task update validation for board leads with detailed error messages 2026-02-14 00:08:44 +05:30
Abhimanyu Saharan
68b6029ac3 feat: add GATEWAY_OPERATOR_SCOPES and update connection parameters for operator role 2026-02-13 23:37:10 +05:30
Abhimanyu Saharan
7b16b49218 feat: enhance user display name resolution and update related components 2026-02-13 22:37:08 +05:30
Abhimanyu Saharan
35a9471eb6 ci: restore push trigger to master only 2026-02-13 16:55:12 +00:00
Abhimanyu Saharan
552dbe0cd9 installer: move logs to XDG state dir 2026-02-13 16:53:35 +00:00
Abhimanyu Saharan
30c337d733 installer: remove unused id_like variable 2026-02-13 16:52:25 +00:00
Abhimanyu Saharan
80c4390dec installer: validate required flag values in parse_args 2026-02-13 16:49:42 +00:00
Abhimanyu Saharan
22a51cccfb docs: add curl|bash one-liner for install.sh 2026-02-13 16:40:13 +00:00
Abhimanyu Saharan
c42e8484f8 refactor: remove redundant custom fields button for non-admin users 2026-02-13 22:07:17 +05:30
Abhimanyu Saharan
619f77286f feat: add task detail URL handling and utility functions for taskId management 2026-02-13 22:06:37 +05:30
Abhimanyu Saharan
372b4e191c feat: implement custom field form and utility functions for managing custom fields 2026-02-13 22:01:18 +05:30
Abhimanyu Saharan
b98d6e2f83 feat: add lead_reasoning field to ApprovalCreate and update validation logic 2026-02-13 21:34:08 +05:30
Abhimanyu Saharan
aea69f5118 refactor: improve code formatting and readability in page.tsx 2026-02-13 21:30:29 +05:30
Abhimanyu Saharan
a409358a9a installer: portability scaffold for non-debian distros
Merge installer portability scaffold
2026-02-13 21:29:44 +05:30
Abhimanyu Saharan
277bfcb33a feat: add custom-fields 2026-02-13 21:24:36 +05:30
Abhimanyu Saharan
be11110620 installer: prefer /usr/bin/node after NodeSource install 2026-02-13 14:15:40 +00:00
Abhimanyu Saharan
01fc50a1d2 installer: require Node.js >= 22 2026-02-13 14:07:17 +00:00
Abhimanyu Saharan
645e620ae9 installer: scaffold package manager abstraction and support matrix 2026-02-13 09:40:54 +00:00
Abhimanyu Saharan
6e8069fc2a chore: remove manual CI trigger for master branch in workflow configuration 2026-02-13 14:45:15 +05:30
Abhimanyu Saharan
8750335281 feat: add installer script and CI configuration for deployment modes 2026-02-13 14:41:27 +05:30
169 changed files with 14055 additions and 6807 deletions

View File

@@ -4,7 +4,6 @@ on:
pull_request:
push:
branches: [master]
# Allow maintainers to manually kick CI when GitHub doesn't create a run for a new head SHA.
workflow_dispatch:
concurrency:
@@ -64,6 +63,7 @@ jobs:
- name: Run migration integrity gate
if: ${{ github.event_name == 'pull_request' }}
run: |
set -euo pipefail
@@ -132,6 +132,55 @@ jobs:
backend/coverage.xml
frontend/coverage/**
installer:
runs-on: ubuntu-latest
needs: [check]
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Validate installer shell syntax
run: bash -n install.sh
- name: Installer smoke test (docker mode)
run: |
./install.sh \
--mode docker \
--backend-port 18000 \
--frontend-port 13000 \
--public-host localhost \
--api-url http://localhost:18000 \
--token-mode generate
curl -fsS http://127.0.0.1:18000/healthz >/dev/null
curl -fsS http://127.0.0.1:13000 >/dev/null
- name: Cleanup docker stack after docker mode
if: always()
run: |
docker compose -f compose.yml --env-file .env down -v --remove-orphans || true
- name: Installer smoke test (local mode)
run: |
./install.sh \
--mode local \
--backend-port 18001 \
--frontend-port 13001 \
--public-host localhost \
--api-url http://localhost:18001 \
--token-mode generate \
--db-mode docker \
--start-services yes
curl -fsS http://127.0.0.1:18001/healthz >/dev/null
curl -fsS http://127.0.0.1:13001 >/dev/null
- name: Cleanup local processes and docker resources
if: always()
run: |
if [ -f .install-logs/backend.pid ]; then kill "$(cat .install-logs/backend.pid)" || true; fi
if [ -f .install-logs/frontend.pid ]; then kill "$(cat .install-logs/frontend.pid)" || true; fi
docker compose -f compose.yml --env-file .env down -v --remove-orphans || true
e2e:
runs-on: ubuntu-latest
needs: [check]

View File

@@ -45,6 +45,31 @@ Core operational areas:
## Get started in minutes
### Option A: One-command production-style bootstrap
If you haven't cloned the repo yet, you can run the installer in one line:
```bash
curl -fsSL https://raw.githubusercontent.com/abhi1693/openclaw-mission-control/master/install.sh | bash
```
If you already cloned the repo:
```bash
./install.sh
```
The installer is interactive and will:
- Ask for deployment mode (`docker` or `local`).
- Install missing system dependencies when possible.
- Generate and configure environment files.
- Bootstrap and start the selected deployment mode.
Installer support matrix: [`docs/installer-support.md`](./docs/installer-support.md)
### Option B: Manual setup
### Prerequisites
- Docker Engine

View File

@@ -12,12 +12,19 @@ BASE_URL=
AUTH_MODE=local
# REQUIRED when AUTH_MODE=local (must be non-placeholder and at least 50 chars).
LOCAL_AUTH_TOKEN=
# Clerk (auth only; used when AUTH_MODE=clerk)
CLERK_SECRET_KEY=
CLERK_API_URL=https://api.clerk.com
CLERK_VERIFY_IAT=true
CLERK_LEEWAY=10.0
# Database
DB_AUTO_MIGRATE=false
# Webhook queue / worker
WEBHOOK_REDIS_URL=redis://localhost:6379/0
WEBHOOK_QUEUE_NAME=webhook-dispatch
WEBHOOK_DISPATCH_THROTTLE_SECONDS=2.0
WEBHOOK_DISPATCH_SCHEDULE_ID=webhook-dispatch-batch
WEBHOOK_DISPATCH_SCHEDULE_INTERVAL_SECONDS=900
WEBHOOK_DISPATCH_MAX_RETRIES=3
# Suppress routine GitHub CI telemetry events from lead notifications (still persisted to DB/memory).
WEBHOOK_DISPATCH_SUPPRESS_ROUTINE_EVENTS=true

View File

@@ -287,13 +287,16 @@ async def create_task(
"""Create a task as the board lead.
Lead-only endpoint. Supports dependency-aware creation via
`depends_on_task_ids` and optional `tag_ids`.
`depends_on_task_ids`, optional `tag_ids`, and `custom_field_values`.
"""
_guard_board_access(agent_ctx, board)
_require_board_lead(agent_ctx)
data = payload.model_dump(exclude={"depends_on_task_ids", "tag_ids"})
data = payload.model_dump(
exclude={"depends_on_task_ids", "tag_ids", "custom_field_values"},
)
depends_on_task_ids = list(payload.depends_on_task_ids)
tag_ids = list(payload.tag_ids)
custom_field_values = dict(payload.custom_field_values)
task = Task.model_validate(data)
task.board_id = board.id
@@ -343,6 +346,12 @@ async def create_task(
session.add(task)
# Ensure the task exists in the DB before inserting dependency rows.
await session.flush()
await tasks_api._set_task_custom_field_values_for_create(
session,
board_id=board.id,
task_id=task.id,
custom_field_values=custom_field_values,
)
for dep_id in normalized_deps:
session.add(
TaskDependency(

View File

@@ -394,13 +394,10 @@ async def create_approval(
_actor: ActorContext = ACTOR_DEP,
) -> ApprovalRead:
"""Create an approval for a board."""
payload_dict = payload.payload
if payload_dict is None and isinstance(payload.lead_reasoning, str) and payload.lead_reasoning.strip():
payload_dict = {"reason": payload.lead_reasoning.strip()}
task_ids = normalize_task_ids(
task_id=payload.task_id,
task_ids=payload.task_ids,
payload=payload_dict,
payload=payload.payload,
)
task_id = task_ids[0] if task_ids else None
if payload.status == "pending":
@@ -414,7 +411,7 @@ async def create_approval(
task_id=task_id,
agent_id=payload.agent_id,
action_type=payload.action_type,
payload=payload_dict,
payload=payload.payload,
confidence=payload.confidence,
rubric_scores=payload.rubric_scores,
status=payload.status,

View File

@@ -29,6 +29,7 @@ from app.schemas.board_webhooks import (
from app.schemas.common import OkResponse
from app.schemas.pagination import DefaultLimitOffsetPage
from app.services.openclaw.gateway_dispatch import GatewayDispatchService
from app.services.webhooks.queue import QueuedWebhookDelivery, enqueue_webhook_delivery
if TYPE_CHECKING:
from collections.abc import Sequence
@@ -166,6 +167,12 @@ def _captured_headers(request: Request) -> dict[str, str] | None:
return captured or None
def _extract_webhook_event(headers: dict[str, str] | None) -> str | None:
if not headers:
return None
return headers.get("x-github-event") or headers.get("x-event-type")
def _payload_preview(
value: dict[str, object] | list[object] | str | int | float | bool | None,
) -> str:
@@ -412,6 +419,7 @@ async def ingest_board_webhook(
)
content_type = request.headers.get("content-type")
headers = _captured_headers(request)
payload_value = _decode_payload(
await request.body(),
content_type=content_type,
@@ -420,7 +428,7 @@ async def ingest_board_webhook(
board_id=board.id,
webhook_id=webhook.id,
payload=payload_value,
headers=_captured_headers(request),
headers=headers,
source_ip=request.client.host if request.client else None,
content_type=content_type,
)
@@ -438,12 +446,25 @@ async def ingest_board_webhook(
)
session.add(memory)
await session.commit()
await _notify_lead_on_webhook_payload(
session=session,
board=board,
webhook=webhook,
payload=payload,
enqueued = enqueue_webhook_delivery(
QueuedWebhookDelivery(
board_id=board.id,
webhook_id=webhook.id,
payload_id=payload.id,
payload_event=_extract_webhook_event(headers),
received_at=payload.received_at,
),
)
if not enqueued:
# Preserve historical behavior by still notifying synchronously if queueing fails.
await _notify_lead_on_webhook_payload(
session=session,
board=board,
webhook=webhook,
payload=payload,
)
return BoardWebhookIngestResponse(
board_id=board.id,
webhook_id=webhook.id,

View File

@@ -0,0 +1,343 @@
"""Organization-level task custom field definition management."""
from __future__ import annotations
from typing import TYPE_CHECKING
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy import func
from sqlalchemy.exc import IntegrityError
from sqlmodel import col, select
from app.api.deps import require_org_admin, require_org_member
from app.core.time import utcnow
from app.db.session import get_session
from app.models.boards import Board
from app.models.task_custom_fields import (
BoardTaskCustomField,
TaskCustomFieldDefinition,
TaskCustomFieldValue,
)
from app.schemas.common import OkResponse
from app.schemas.task_custom_fields import (
TaskCustomFieldDefinitionCreate,
TaskCustomFieldDefinitionRead,
TaskCustomFieldDefinitionUpdate,
validate_custom_field_definition,
)
from app.services.organizations import OrganizationContext
if TYPE_CHECKING:
from sqlmodel.ext.asyncio.session import AsyncSession
router = APIRouter(prefix="/organizations/me/custom-fields", tags=["org-custom-fields"])
SESSION_DEP = Depends(get_session)
ORG_MEMBER_DEP = Depends(require_org_member)
ORG_ADMIN_DEP = Depends(require_org_admin)
def _to_definition_read_payload(
    *,
    definition: TaskCustomFieldDefinition,
    board_ids: list[UUID],
) -> TaskCustomFieldDefinitionRead:
    """Serialize a definition ORM row into its read schema, attaching board bindings."""
    read_model = TaskCustomFieldDefinitionRead.model_validate(
        definition,
        from_attributes=True,
    )
    read_model.board_ids = board_ids
    return read_model
async def _board_ids_by_definition_id(
    *,
    session: AsyncSession,
    definition_ids: list[UUID],
) -> dict[UUID, list[UUID]]:
    """Map each requested definition id to the sorted board ids bound to it."""
    if not definition_ids:
        return {}
    binding_rows = (
        await session.exec(
            select(
                col(BoardTaskCustomField.task_custom_field_definition_id),
                col(BoardTaskCustomField.board_id),
            ).where(
                col(BoardTaskCustomField.task_custom_field_definition_id).in_(definition_ids),
            ),
        )
    ).all()
    # Seed every requested id so callers see an empty list for unbound definitions.
    result: dict[UUID, list[UUID]] = {definition_id: [] for definition_id in definition_ids}
    for definition_id, board_id in binding_rows:
        result.setdefault(definition_id, []).append(board_id)
    # Sort by string form for a stable, deterministic UUID ordering.
    for ids in result.values():
        ids.sort(key=str)
    return result
async def _validated_board_ids_for_org(
    *,
    session: AsyncSession,
    ctx: OrganizationContext,
    board_ids: list[UUID],
) -> list[UUID]:
    """Deduplicate the requested board ids and ensure each belongs to the caller's org.

    Raises HTTP 422 when the list is empty or references boards outside the
    organization; the 422 detail names the offending ids.
    """
    # dict.fromkeys deduplicates while preserving the caller's ordering.
    deduped = list(dict.fromkeys(board_ids))
    if not deduped:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail="At least one board must be selected.",
        )
    known_ids = set(
        (
            await session.exec(
                select(col(Board.id)).where(
                    col(Board.organization_id) == ctx.organization.id,
                    col(Board.id).in_(deduped),
                ),
            )
        ).all(),
    )
    invalid_ids = sorted(
        {candidate for candidate in deduped if candidate not in known_ids},
        key=str,
    )
    if invalid_ids:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "message": "Some selected boards are invalid for this organization.",
                "invalid_board_ids": [str(value) for value in invalid_ids],
            },
        )
    return deduped
async def _get_org_definition(
    *,
    session: AsyncSession,
    ctx: OrganizationContext,
    definition_id: UUID,
) -> TaskCustomFieldDefinition:
    """Fetch a definition scoped to the caller's organization, or raise HTTP 404."""
    statement = select(TaskCustomFieldDefinition).where(
        col(TaskCustomFieldDefinition.id) == definition_id,
        col(TaskCustomFieldDefinition.organization_id) == ctx.organization.id,
    )
    definition = (await session.exec(statement)).first()
    if definition is None:
        # Scoping by organization_id means "not yours" is indistinguishable from "missing".
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    return definition
@router.get("", response_model=list[TaskCustomFieldDefinitionRead])
async def list_org_custom_fields(
    ctx: OrganizationContext = ORG_MEMBER_DEP,
    session: AsyncSession = SESSION_DEP,
) -> list[TaskCustomFieldDefinitionRead]:
    """List task custom field definitions for the authenticated organization."""
    # Case-insensitive label ordering keeps the admin UI listing stable.
    statement = (
        select(TaskCustomFieldDefinition)
        .where(col(TaskCustomFieldDefinition.organization_id) == ctx.organization.id)
        .order_by(func.lower(col(TaskCustomFieldDefinition.label)).asc())
    )
    definitions = list(await session.exec(statement))
    bindings = await _board_ids_by_definition_id(
        session=session,
        definition_ids=[definition.id for definition in definitions],
    )
    results: list[TaskCustomFieldDefinitionRead] = []
    for definition in definitions:
        results.append(
            _to_definition_read_payload(
                definition=definition,
                board_ids=bindings.get(definition.id, []),
            ),
        )
    return results
@router.post("", response_model=TaskCustomFieldDefinitionRead)
async def create_org_custom_field(
payload: TaskCustomFieldDefinitionCreate,
ctx: OrganizationContext = ORG_ADMIN_DEP,
session: AsyncSession = SESSION_DEP,
) -> TaskCustomFieldDefinitionRead:
"""Create an organization-level task custom field definition.

Admin-only. Validates the selected boards and the field type / regex /
default combination, then persists the definition plus one board-binding
row per selected board in a single transaction.

Raises:
    HTTPException: 422 for invalid boards or an invalid type/regex/default
        combination; 409 when the field key already exists in the org.
"""
board_ids = await _validated_board_ids_for_org(
session=session,
ctx=ctx,
board_ids=payload.board_ids,
)
try:
validate_custom_field_definition(
field_type=payload.field_type,
validation_regex=payload.validation_regex,
default_value=payload.default_value,
)
except ValueError as err:
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
detail=str(err),
) from err
definition = TaskCustomFieldDefinition(
organization_id=ctx.organization.id,
field_key=payload.field_key,
# Fall back to the key itself when no human-readable label was supplied.
label=payload.label or payload.field_key,
field_type=payload.field_type,
ui_visibility=payload.ui_visibility,
validation_regex=payload.validation_regex,
description=payload.description,
required=payload.required,
default_value=payload.default_value,
)
session.add(definition)
# Flush so definition.id is assigned before the binding rows reference it.
await session.flush()
for board_id in board_ids:
session.add(
BoardTaskCustomField(
board_id=board_id,
task_custom_field_definition_id=definition.id,
),
)
try:
await session.commit()
except IntegrityError as err:
await session.rollback()
# NOTE(review): assumes the only IntegrityError source here is the
# field-key uniqueness constraint — confirm against the DB schema.
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail="Field key already exists in this organization.",
) from err
await session.refresh(definition)
return _to_definition_read_payload(definition=definition, board_ids=board_ids)
@router.patch("/{task_custom_field_definition_id}", response_model=TaskCustomFieldDefinitionRead)
async def update_org_custom_field(
task_custom_field_definition_id: UUID,
payload: TaskCustomFieldDefinitionUpdate,
ctx: OrganizationContext = ORG_ADMIN_DEP,
session: AsyncSession = SESSION_DEP,
) -> TaskCustomFieldDefinitionRead:
"""Partially update an organization-level task custom field definition.

Only fields present in the payload are applied (exclude_unset). When
``board_ids`` is supplied, board bindings are reconciled as a diff:
bindings for deselected boards are deleted, new selections inserted.

Raises:
    HTTPException: 404 when the definition is not in the caller's org;
        422 for invalid boards or an invalid resulting type/regex/default
        combination; 409 on a duplicate field key.
"""
definition = await _get_org_definition(
session=session,
ctx=ctx,
definition_id=task_custom_field_definition_id,
)
updates = payload.model_dump(exclude_unset=True)
# board_ids is handled separately from scalar column updates below.
board_ids = updates.pop("board_ids", None)
validated_board_ids: list[UUID] | None = None
if board_ids is not None:
validated_board_ids = await _validated_board_ids_for_org(
session=session,
ctx=ctx,
board_ids=board_ids,
)
# Validate against the POST-patch values so a partial update cannot leave
# an inconsistent type/regex/default combination behind.
next_field_type = updates.get("field_type", definition.field_type)
next_validation_regex = (
updates["validation_regex"]
if "validation_regex" in updates
else definition.validation_regex
)
next_default_value = (
updates["default_value"] if "default_value" in updates else definition.default_value
)
try:
validate_custom_field_definition(
field_type=next_field_type,
validation_regex=next_validation_regex,
default_value=next_default_value,
)
except ValueError as err:
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
detail=str(err),
) from err
for key, value in updates.items():
setattr(definition, key, value)
if validated_board_ids is not None:
# Reconcile bindings: delete rows for deselected boards, add missing ones.
bindings = list(
await session.exec(
select(BoardTaskCustomField).where(
col(BoardTaskCustomField.task_custom_field_definition_id) == definition.id,
),
),
)
current_board_ids = {binding.board_id for binding in bindings}
target_board_ids = set(validated_board_ids)
for binding in bindings:
if binding.board_id not in target_board_ids:
await session.delete(binding)
for board_id in validated_board_ids:
if board_id in current_board_ids:
continue
session.add(
BoardTaskCustomField(
board_id=board_id,
task_custom_field_definition_id=definition.id,
),
)
# Bump updated_at explicitly so even binding-only updates are reflected.
definition.updated_at = utcnow()
session.add(definition)
try:
await session.commit()
except IntegrityError as err:
await session.rollback()
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail="Field key already exists in this organization.",
) from err
await session.refresh(definition)
# When bindings were untouched, re-read them so the response is complete.
if validated_board_ids is None:
board_ids = (
await _board_ids_by_definition_id(
session=session,
definition_ids=[definition.id],
)
).get(definition.id, [])
else:
board_ids = validated_board_ids
return _to_definition_read_payload(definition=definition, board_ids=board_ids)
@router.delete("/{task_custom_field_definition_id}", response_model=OkResponse)
async def delete_org_custom_field(
    task_custom_field_definition_id: UUID,
    ctx: OrganizationContext = ORG_ADMIN_DEP,
    session: AsyncSession = SESSION_DEP,
) -> OkResponse:
    """Delete an org-level definition when it has no persisted task values.

    Raises:
        HTTPException: 404 when the definition is not in the caller's org;
            409 when any task still stores a value for the definition.
    """
    definition = await _get_org_definition(
        session=session,
        ctx=ctx,
        definition_id=task_custom_field_definition_id,
    )
    # Existence check only — fetch at most one row instead of materializing
    # every value id (the original loaded them all just to test truthiness).
    has_values = (
        await session.exec(
            select(col(TaskCustomFieldValue.id))
            .where(
                col(TaskCustomFieldValue.task_custom_field_definition_id) == definition.id,
            )
            .limit(1),
        )
    ).first() is not None
    if has_values:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Cannot delete a custom field definition while task values exist.",
        )
    # Remove board bindings first, then the definition, in one transaction.
    bindings = list(
        await session.exec(
            select(BoardTaskCustomField).where(
                col(BoardTaskCustomField.task_custom_field_definition_id) == definition.id,
            ),
        ),
    )
    for binding in bindings:
        await session.delete(binding)
    await session.delete(definition)
    await session.commit()
    return OkResponse()

View File

@@ -7,7 +7,7 @@ import json
from collections import deque
from dataclasses import dataclass
from datetime import UTC, datetime
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, cast
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Request, status
@@ -33,6 +33,11 @@ from app.models.approval_task_links import ApprovalTaskLink
from app.models.approvals import Approval
from app.models.boards import Board
from app.models.tag_assignments import TagAssignment
from app.models.task_custom_fields import (
BoardTaskCustomField,
TaskCustomFieldDefinition,
TaskCustomFieldValue,
)
from app.models.task_dependencies import TaskDependency
from app.models.task_fingerprints import TaskFingerprint
from app.models.tasks import Task
@@ -40,6 +45,11 @@ from app.schemas.activity_events import ActivityEventRead
from app.schemas.common import OkResponse
from app.schemas.errors import BlockedTaskError
from app.schemas.pagination import DefaultLimitOffsetPage
from app.schemas.task_custom_fields import (
TaskCustomFieldType,
TaskCustomFieldValues,
validate_custom_field_value,
)
from app.schemas.tasks import TaskCommentCreate, TaskCommentRead, TaskCreate, TaskRead, TaskUpdate
from app.services.activity_log import record_activity
from app.services.approval_task_links import (
@@ -99,6 +109,16 @@ ADMIN_AUTH_DEP = Depends(require_admin_auth)
TASK_DEP = Depends(get_task_or_404)
@dataclass(frozen=True, slots=True)
class _BoardCustomFieldDefinition:
# Immutable snapshot of a TaskCustomFieldDefinition row, detached from the
# ORM session so validation helpers can use it without touching the DB.
id: UUID
# Key clients use to address the field in custom_field_values payloads.
field_key: str
field_type: TaskCustomFieldType
# Optional regex consumed by validate_custom_field_value.
validation_regex: str | None
required: bool
# Applied when a task provides no explicit value for this field.
default_value: object | None
def _comment_validation_error() -> HTTPException:
return HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -107,10 +127,12 @@ def _comment_validation_error() -> HTTPException:
def _blocked_task_error(blocked_by_task_ids: Sequence[UUID]) -> HTTPException:
"""Build the 409 raised when transitioning a task with incomplete dependencies."""
# NOTE: Keep this payload machine-readable; UI and automation rely on it.
return HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail={
"message": "Task is blocked by incomplete dependencies.",
"code": "task_blocked_cannot_transition",
"blocked_by_task_ids": [str(value) for value in blocked_by_task_ids],
},
)
@@ -697,6 +719,281 @@ def _status_values(status_filter: str | None) -> list[str]:
return values
async def _organization_custom_field_definitions_for_board(
    session: AsyncSession,
    *,
    board_id: UUID,
) -> dict[str, _BoardCustomFieldDefinition]:
    """Load the board's bound custom field definitions, keyed by field_key.

    Raises HTTP 404 when the board does not exist.
    """
    org_id = (
        await session.exec(
            select(Board.organization_id).where(col(Board.id) == board_id),
        )
    ).first()
    if org_id is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    statement = (
        select(TaskCustomFieldDefinition)
        .join(
            BoardTaskCustomField,
            col(BoardTaskCustomField.task_custom_field_definition_id)
            == col(TaskCustomFieldDefinition.id),
        )
        .where(
            col(BoardTaskCustomField.board_id) == board_id,
            col(TaskCustomFieldDefinition.organization_id) == org_id,
        )
    )
    snapshots: dict[str, _BoardCustomFieldDefinition] = {}
    for definition in await session.exec(statement):
        # Snapshot into a frozen dataclass so later helpers never touch the ORM row.
        snapshots[definition.field_key] = _BoardCustomFieldDefinition(
            id=definition.id,
            field_key=definition.field_key,
            field_type=cast(TaskCustomFieldType, definition.field_type),
            validation_regex=definition.validation_regex,
            required=definition.required,
            default_value=definition.default_value,
        )
    return snapshots
def _reject_unknown_custom_field_keys(
    *,
    custom_field_values: TaskCustomFieldValues,
    definitions_by_key: dict[str, _BoardCustomFieldDefinition],
) -> None:
    """Raise HTTP 422 if the payload references keys the board does not define."""
    unknown = sorted(set(custom_field_values) - set(definitions_by_key))
    if unknown:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "message": "Unknown custom field keys for this board.",
                "unknown_field_keys": unknown,
            },
        )
def _reject_missing_required_custom_field_keys(
    *,
    effective_values: TaskCustomFieldValues,
    definitions_by_key: dict[str, _BoardCustomFieldDefinition],
) -> None:
    """Raise HTTP 422 when any required field resolves to None after defaults apply."""
    missing: list[str] = []
    for definition in definitions_by_key.values():
        if definition.required and effective_values.get(definition.field_key) is None:
            missing.append(definition.field_key)
    if missing:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "message": "Required custom fields must have values.",
                "missing_field_keys": sorted(missing),
            },
        )
def _reject_invalid_custom_field_values(
*,
custom_field_values: TaskCustomFieldValues,
definitions_by_key: dict[str, _BoardCustomFieldDefinition],
) -> None:
"""Validate each provided value against its definition; raise HTTP 422 on failure."""
for field_key, value in custom_field_values.items():
# Callers must reject unknown keys first; a KeyError here would be a bug.
definition = definitions_by_key[field_key]
try:
validate_custom_field_value(
field_type=definition.field_type,
value=value,
validation_regex=definition.validation_regex,
)
except ValueError as err:
# Machine-readable payload naming the offending key, type, and reason.
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
detail={
"message": "Invalid custom field value.",
"field_key": field_key,
"field_type": definition.field_type,
"reason": str(err),
},
) from err
async def _task_custom_field_rows_by_definition_id(
    session: AsyncSession,
    *,
    task_id: UUID,
    definition_ids: list[UUID],
) -> dict[UUID, TaskCustomFieldValue]:
    """Fetch the task's stored custom-field value rows, keyed by definition id."""
    if not definition_ids:
        return {}
    statement = select(TaskCustomFieldValue).where(
        col(TaskCustomFieldValue.task_id) == task_id,
        col(TaskCustomFieldValue.task_custom_field_definition_id).in_(definition_ids),
    )
    return {
        row.task_custom_field_definition_id: row
        for row in await session.exec(statement)
    }
async def _set_task_custom_field_values_for_create(
session: AsyncSession,
*,
board_id: UUID,
task_id: UUID,
custom_field_values: TaskCustomFieldValues,
) -> None:
"""Validate and persist custom field values for a newly created task.

Unknown keys and invalid values raise HTTP 422. Fields omitted from the
payload fall back to their definition's default; required fields that still
resolve to None also raise HTTP 422. Only non-None values get rows.
Rows are added to the session but not flushed or committed here.
"""
definitions_by_key = await _organization_custom_field_definitions_for_board(
session,
board_id=board_id,
)
_reject_unknown_custom_field_keys(
custom_field_values=custom_field_values,
definitions_by_key=definitions_by_key,
)
_reject_invalid_custom_field_values(
custom_field_values=custom_field_values,
definitions_by_key=definitions_by_key,
)
# Merge explicit payload values with definition defaults for every board field.
effective_values: TaskCustomFieldValues = {}
for field_key, definition in definitions_by_key.items():
if field_key in custom_field_values:
effective_values[field_key] = custom_field_values[field_key]
else:
effective_values[field_key] = definition.default_value
_reject_missing_required_custom_field_keys(
effective_values=effective_values,
definitions_by_key=definitions_by_key,
)
# None means "no value": skip rather than store an explicit null row.
for field_key, definition in definitions_by_key.items():
value = effective_values.get(field_key)
if value is None:
continue
session.add(
TaskCustomFieldValue(
task_id=task_id,
task_custom_field_definition_id=definition.id,
value=value,
),
)
async def _set_task_custom_field_values_for_update(
session: AsyncSession,
*,
board_id: UUID,
task_id: UUID,
custom_field_values: TaskCustomFieldValues,
) -> None:
"""Validate and apply a partial custom-field update for an existing task.

Keys absent from the payload keep their stored value (or fall back to the
default when no row exists). An explicit None deletes the stored row;
other values insert or update the row in place. Unknown keys, invalid
values, and required fields resolving to None raise HTTP 422.
"""
definitions_by_key = await _organization_custom_field_definitions_for_board(
session,
board_id=board_id,
)
_reject_unknown_custom_field_keys(
custom_field_values=custom_field_values,
definitions_by_key=definitions_by_key,
)
_reject_invalid_custom_field_values(
custom_field_values=custom_field_values,
definitions_by_key=definitions_by_key,
)
definitions_by_id = {definition.id: definition for definition in definitions_by_key.values()}
rows_by_definition_id = await _task_custom_field_rows_by_definition_id(
session,
task_id=task_id,
definition_ids=list(definitions_by_id),
)
# Compute post-update effective values: payload wins, then stored row, then default.
effective_values: TaskCustomFieldValues = {}
for field_key, definition in definitions_by_key.items():
current_row = rows_by_definition_id.get(definition.id)
if field_key in custom_field_values:
effective_values[field_key] = custom_field_values[field_key]
elif current_row is not None:
effective_values[field_key] = current_row.value
else:
effective_values[field_key] = definition.default_value
_reject_missing_required_custom_field_keys(
effective_values=effective_values,
definitions_by_key=definitions_by_key,
)
# Apply only the keys the caller sent; untouched fields keep their rows.
for field_key, value in custom_field_values.items():
definition = definitions_by_key[field_key]
row = rows_by_definition_id.get(definition.id)
if value is None:
# Explicit None clears the stored value.
if row is not None:
await session.delete(row)
continue
if row is None:
session.add(
TaskCustomFieldValue(
task_id=task_id,
task_custom_field_definition_id=definition.id,
value=value,
),
)
continue
row.value = value
row.updated_at = utcnow()
session.add(row)
async def _task_custom_field_values_by_task_id(
session: AsyncSession,
*,
board_id: UUID,
task_ids: Sequence[UUID],
) -> dict[UUID, TaskCustomFieldValues]:
"""Resolve effective custom field values for each task on a board.

Every task gets an entry for every board-bound field: stored rows override
the definition defaults, and tasks with no stored rows return pure defaults.
Returns {} for an empty task list; per-task {} when the board binds no fields.
"""
unique_task_ids = list({*task_ids})
if not unique_task_ids:
return {}
definitions_by_key = await _organization_custom_field_definitions_for_board(
session,
board_id=board_id,
)
if not definitions_by_key:
return {task_id: {} for task_id in unique_task_ids}
definitions_by_id = {definition.id: definition for definition in definitions_by_key.values()}
# Start every task at the defaults, then overlay persisted rows below.
default_values = {
field_key: definition.default_value for field_key, definition in definitions_by_key.items()
}
values_by_task_id: dict[UUID, TaskCustomFieldValues] = {
task_id: dict(default_values) for task_id in unique_task_ids
}
rows = (
await session.exec(
select(
col(TaskCustomFieldValue.task_id),
col(TaskCustomFieldValue.task_custom_field_definition_id),
col(TaskCustomFieldValue.value),
).where(
col(TaskCustomFieldValue.task_id).in_(unique_task_ids),
col(TaskCustomFieldValue.task_custom_field_definition_id).in_(
list(definitions_by_id),
),
),
)
).all()
for task_id, definition_id, value in rows:
definition = definitions_by_id.get(definition_id)
if definition is None:
# Defensive: the IN filter should make this unreachable.
continue
values_by_task_id[task_id][definition.field_key] = value
return values_by_task_id
def _task_list_statement(
*,
board_id: UUID,
@@ -742,6 +1039,11 @@ async def _task_read_page(
board_id=board_id,
dependency_ids=list({*dep_ids}),
)
custom_field_values_by_task_id = await _task_custom_field_values_by_task_id(
session,
board_id=board_id,
task_ids=task_ids,
)
output: list[TaskRead] = []
for task in tasks:
@@ -761,6 +1063,7 @@ async def _task_read_page(
"tags": tag_state.tags,
"blocked_by_task_ids": blocked_by,
"is_blocked": bool(blocked_by),
"custom_field_values": custom_field_values_by_task_id.get(task.id, {}),
},
),
)
@@ -772,12 +1075,17 @@ async def _stream_task_state(
*,
board_id: UUID,
rows: list[tuple[ActivityEvent, Task | None]],
) -> tuple[dict[UUID, list[UUID]], dict[UUID, str], dict[UUID, TagState]]:
) -> tuple[
dict[UUID, list[UUID]],
dict[UUID, str],
dict[UUID, TagState],
dict[UUID, TaskCustomFieldValues],
]:
task_ids = [
task.id for event, task in rows if task is not None and event.event_type != "task.comment"
]
if not task_ids:
return {}, {}, {}
return {}, {}, {}, {}
tag_state_by_task_id = await load_tag_state(
session,
@@ -791,15 +1099,20 @@ async def _stream_task_state(
dep_ids: list[UUID] = []
for value in deps_map.values():
dep_ids.extend(value)
custom_field_values_by_task_id = await _task_custom_field_values_by_task_id(
session,
board_id=board_id,
task_ids=list({*task_ids}),
)
if not dep_ids:
return deps_map, {}, tag_state_by_task_id
return deps_map, {}, tag_state_by_task_id, custom_field_values_by_task_id
dep_status = await dependency_status_by_id(
session,
board_id=board_id,
dependency_ids=list({*dep_ids}),
)
return deps_map, dep_status, tag_state_by_task_id
return deps_map, dep_status, tag_state_by_task_id, custom_field_values_by_task_id
def _task_event_payload(
@@ -809,7 +1122,9 @@ def _task_event_payload(
deps_map: dict[UUID, list[UUID]],
dep_status: dict[UUID, str],
tag_state_by_task_id: dict[UUID, TagState],
custom_field_values_by_task_id: dict[UUID, TaskCustomFieldValues] | None = None,
) -> dict[str, object]:
resolved_custom_field_values_by_task_id = custom_field_values_by_task_id or {}
payload: dict[str, object] = {
"type": event.event_type,
"activity": ActivityEventRead.model_validate(event).model_dump(mode="json"),
@@ -838,6 +1153,10 @@ def _task_event_payload(
"tags": tag_state.tags,
"blocked_by_task_ids": blocked_by,
"is_blocked": bool(blocked_by),
"custom_field_values": resolved_custom_field_values_by_task_id.get(
task.id,
{},
),
},
)
.model_dump(mode="json")
@@ -861,10 +1180,12 @@ async def _task_event_generator(
async with async_session_maker() as session:
rows = await _fetch_task_events(session, board_id, last_seen)
deps_map, dep_status, tag_state_by_task_id = await _stream_task_state(
session,
board_id=board_id,
rows=rows,
deps_map, dep_status, tag_state_by_task_id, custom_field_values_by_task_id = (
await _stream_task_state(
session,
board_id=board_id,
rows=rows,
)
)
for event, task in rows:
@@ -883,6 +1204,7 @@ async def _task_event_generator(
deps_map=deps_map,
dep_status=dep_status,
tag_state_by_task_id=tag_state_by_task_id,
custom_field_values_by_task_id=custom_field_values_by_task_id,
)
yield {"event": "task", "data": json.dumps(payload)}
await asyncio.sleep(2)
@@ -943,9 +1265,10 @@ async def create_task(
auth: AuthContext = ADMIN_AUTH_DEP,
) -> TaskRead:
"""Create a task and initialize dependency rows."""
data = payload.model_dump(exclude={"depends_on_task_ids", "tag_ids"})
data = payload.model_dump(exclude={"depends_on_task_ids", "tag_ids", "custom_field_values"})
depends_on_task_ids = list(payload.depends_on_task_ids)
tag_ids = list(payload.tag_ids)
custom_field_values = dict(payload.custom_field_values)
task = Task.model_validate(data)
task.board_id = board.id
@@ -977,6 +1300,12 @@ async def create_task(
session.add(task)
# Ensure the task exists in the DB before inserting dependency rows.
await session.flush()
await _set_task_custom_field_values_for_create(
session,
board_id=board.id,
task_id=task.id,
custom_field_values=custom_field_values,
)
for dep_id in normalized_deps:
session.add(
TaskDependency(
@@ -1051,9 +1380,14 @@ async def update_task(
payload.depends_on_task_ids if "depends_on_task_ids" in payload.model_fields_set else None
)
tag_ids = payload.tag_ids if "tag_ids" in payload.model_fields_set else None
custom_field_values = (
payload.custom_field_values if "custom_field_values" in payload.model_fields_set else None
)
custom_field_values_set = "custom_field_values" in payload.model_fields_set
updates.pop("comment", None)
updates.pop("depends_on_task_ids", None)
updates.pop("tag_ids", None)
updates.pop("custom_field_values", None)
requested_status = payload.status if "status" in payload.model_fields_set else None
update = _TaskUpdateInput(
task=task,
@@ -1066,6 +1400,8 @@ async def update_task(
comment=comment,
depends_on_task_ids=depends_on_task_ids,
tag_ids=tag_ids,
custom_field_values=custom_field_values or {},
custom_field_values_set=custom_field_values_set,
)
if actor.actor_type == "agent" and actor.agent and actor.agent.is_board_lead:
return await _apply_lead_task_update(session, update=update)
@@ -1142,6 +1478,12 @@ async def delete_task(
col(TagAssignment.task_id) == task.id,
commit=False,
)
await crud.delete_where(
session,
TaskCustomFieldValue,
col(TaskCustomFieldValue.task_id) == task.id,
commit=False,
)
await session.delete(task)
await session.commit()
return OkResponse()
@@ -1306,6 +1648,8 @@ class _TaskUpdateInput:
comment: str | None
depends_on_task_ids: list[UUID] | None
tag_ids: list[UUID] | None
custom_field_values: TaskCustomFieldValues
custom_field_values_set: bool
normalized_tag_ids: list[UUID] | None = None
@@ -1385,6 +1729,11 @@ async def _task_read_response(
board_id=board_id,
dep_ids=dep_ids,
)
custom_field_values_by_task_id = await _task_custom_field_values_by_task_id(
session,
board_id=board_id,
task_ids=[task.id],
)
if task.status == "done":
blocked_ids = []
return TaskRead.model_validate(task, from_attributes=True).model_copy(
@@ -1394,6 +1743,7 @@ async def _task_read_response(
"tags": tag_state.tags,
"blocked_by_task_ids": blocked_ids,
"is_blocked": bool(blocked_ids),
"custom_field_values": custom_field_values_by_task_id.get(task.id, {}),
},
)
@@ -1420,18 +1770,37 @@ def _lead_requested_fields(update: _TaskUpdateInput) -> set[str]:
requested_fields.add("depends_on_task_ids")
if update.tag_ids is not None:
requested_fields.add("tag_ids")
if update.custom_field_values_set:
requested_fields.add("custom_field_values")
return requested_fields
def _validate_lead_update_request(update: _TaskUpdateInput) -> None:
allowed_fields = {"assigned_agent_id", "status", "depends_on_task_ids", "tag_ids"}
allowed_fields = {
"assigned_agent_id",
"status",
"depends_on_task_ids",
"tag_ids",
"custom_field_values",
}
requested_fields = _lead_requested_fields(update)
if update.comment is not None or not requested_fields.issubset(allowed_fields):
if update.comment is not None:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=(
"Board leads can only assign/unassign tasks, update "
"dependencies, or resolve review tasks."
"Lead comment gate failed: board leads cannot include `comment` in task PATCH. "
"Use the task comments endpoint instead."
),
)
disallowed_fields = requested_fields - allowed_fields
if disallowed_fields:
disallowed = ", ".join(sorted(disallowed_fields))
allowed = ", ".join(sorted(allowed_fields))
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=(
"Lead field gate failed: unsupported fields for board leads: "
f"{disallowed}. Allowed fields: {allowed}."
),
)
@@ -1521,13 +1890,19 @@ def _lead_apply_status(update: _TaskUpdateInput) -> None:
if update.task.status != "review":
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=("Board leads can only change status when a task is " "in review."),
detail=(
"Lead status gate failed: board leads can only change status when the current "
f"task status is `review` (current: `{update.task.status}`)."
),
)
target_status = _required_status_value(update.updates["status"])
if target_status not in {"done", "inbox"}:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=("Board leads can only move review tasks to done " "or inbox."),
detail=(
"Lead status target gate failed: review tasks can only move to `done` or "
f"`inbox` (requested: `{target_status}`)."
),
)
if target_status == "inbox":
update.task.assigned_agent_id = None
@@ -1587,13 +1962,19 @@ async def _apply_lead_task_update(
update=update,
)
if blocked_by and update.task.status != "done":
update.task.status = "inbox"
update.task.assigned_agent_id = None
update.task.in_progress_at = None
else:
await _lead_apply_assignment(session, update=update)
_lead_apply_status(update)
# Blocked tasks should not be silently rewritten into a "blocked-safe" state.
# Instead, reject assignment/status transitions with an explicit 409 payload.
if blocked_by:
attempted_fields: set[str] = set(update.updates.keys())
attempted_transition = (
"assigned_agent_id" in attempted_fields
or "status" in attempted_fields
)
if attempted_transition:
raise _blocked_task_error(blocked_by)
await _lead_apply_assignment(session, update=update)
_lead_apply_status(update)
await _require_no_pending_approval_for_status_change_when_enabled(
session,
board_id=update.board_id,
@@ -1622,6 +2003,13 @@ async def _apply_lead_task_update(
task_id=update.task.id,
tag_ids=normalized_tag_ids,
)
if update.custom_field_values_set:
await _set_task_custom_field_values_for_update(
session,
board_id=update.board_id,
task_id=update.task.id,
custom_field_values=update.custom_field_values,
)
update.task.updated_at = utcnow()
session.add(update.task)
@@ -1666,7 +2054,7 @@ async def _apply_non_lead_agent_task_rules(
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# Agents are limited to status/comment updates, and non-inbox status moves
# must pass dependency checks before they can proceed.
allowed_fields = {"status", "comment"}
allowed_fields = {"status", "comment", "custom_field_values"}
if (
update.depends_on_task_ids is not None
or update.tag_ids is not None
@@ -1938,6 +2326,14 @@ async def _finalize_updated_task(
tag_ids=normalized or [],
)
if update.custom_field_values_set:
await _set_task_custom_field_values_for_update(
session,
board_id=update.board_id,
task_id=update.task.id,
custom_field_values=update.custom_field_values,
)
session.add(update.task)
await session.commit()
await session.refresh(update.task)

View File

@@ -53,6 +53,17 @@ class Settings(BaseSettings):
# Database lifecycle
db_auto_migrate: bool = False
# Webhook queueing / dispatch
webhook_redis_url: str = "redis://localhost:6379/0"
webhook_queue_name: str = "webhook-dispatch"
webhook_dispatch_schedule_id: str = "webhook-dispatch-batch"
webhook_dispatch_throttle_seconds: float = 2.0
webhook_dispatch_schedule_interval_seconds: int = 900
webhook_dispatch_max_retries: int = 3
# If true, suppress high-volume routine CI telemetry events (e.g. GitHub check_run success)
# from lead notifications. Payloads are still persisted and recorded in board memory.
webhook_dispatch_suppress_routine_events: bool = True
# Logging
log_level: str = "INFO"
log_format: str = "text"

View File

@@ -26,6 +26,7 @@ from app.api.metrics import router as metrics_router
from app.api.organizations import router as organizations_router
from app.api.souls_directory import router as souls_directory_router
from app.api.tags import router as tags_router
from app.api.task_custom_fields import router as task_custom_fields_router
from app.api.tasks import router as tasks_router
from app.api.users import router as users_router
from app.core.config import settings
@@ -145,6 +146,7 @@ api_v1.include_router(board_webhooks_router)
api_v1.include_router(board_onboarding_router)
api_v1.include_router(approvals_router)
api_v1.include_router(tasks_router)
api_v1.include_router(task_custom_fields_router)
api_v1.include_router(tags_router)
api_v1.include_router(users_router)
app.include_router(api_v1)

View File

@@ -19,6 +19,11 @@ from app.models.organization_members import OrganizationMember
from app.models.organizations import Organization
from app.models.tag_assignments import TagAssignment
from app.models.tags import Tag
from app.models.task_custom_fields import (
BoardTaskCustomField,
TaskCustomFieldDefinition,
TaskCustomFieldValue,
)
from app.models.task_dependencies import TaskDependency
from app.models.task_fingerprints import TaskFingerprint
from app.models.tasks import Task
@@ -38,6 +43,9 @@ __all__ = [
"Board",
"Gateway",
"Organization",
"BoardTaskCustomField",
"TaskCustomFieldDefinition",
"TaskCustomFieldValue",
"OrganizationMember",
"OrganizationBoardAccess",
"OrganizationInvite",

View File

@@ -0,0 +1,92 @@
"""Task custom field models and board binding helpers."""
from __future__ import annotations
from datetime import datetime
from uuid import UUID, uuid4
from sqlalchemy import JSON, CheckConstraint, Column, UniqueConstraint
from sqlmodel import Field
from app.core.time import utcnow
from app.models.tenancy import TenantScoped
RUNTIME_ANNOTATION_TYPES = (datetime,)
class TaskCustomFieldDefinition(TenantScoped, table=True):
    """Reusable custom field definition for task metadata.

    Defined once per organization (``field_key`` is unique per org) and
    attached to individual boards through ``BoardTaskCustomField`` rows.
    """

    __tablename__ = "task_custom_field_definitions"  # pyright: ignore[reportAssignmentType]
    __table_args__ = (
        UniqueConstraint(
            "organization_id",
            "field_key",
            name="uq_task_custom_field_definitions_org_id_field_key",
        ),
        # DB-level guards mirroring the Literal field_type / ui_visibility
        # values enforced by the API schemas.
        CheckConstraint(
            "field_type IN ('text','text_long','integer','decimal','boolean','date','date_time','url','json')",
            name="ck_tcf_def_field_type",
        ),
        CheckConstraint(
            "ui_visibility IN ('always','if_set','hidden')",
            name="ck_tcf_def_ui_visibility",
        ),
    )

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    organization_id: UUID = Field(foreign_key="organizations.id", index=True)
    field_key: str = Field(index=True)
    label: str
    field_type: str = Field(default="text")
    ui_visibility: str = Field(default="always")
    validation_regex: str | None = None
    description: str | None = None
    required: bool = Field(default=False)
    # Arbitrary JSON default applied when a task has no stored value.
    default_value: object | None = Field(default=None, sa_column=Column(JSON))
    created_at: datetime = Field(default_factory=utcnow)
    updated_at: datetime = Field(default_factory=utcnow)
class BoardTaskCustomField(TenantScoped, table=True):
    """Board-level binding of a custom field definition.

    Join row attaching an org-level definition to a board; at most one
    binding per (board, definition) pair.
    """

    __tablename__ = "board_task_custom_fields"  # pyright: ignore[reportAssignmentType]
    __table_args__ = (
        UniqueConstraint(
            "board_id",
            "task_custom_field_definition_id",
            name="uq_board_task_custom_fields_board_id_task_custom_field_definition_id",
        ),
    )

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    board_id: UUID = Field(foreign_key="boards.id", index=True)
    task_custom_field_definition_id: UUID = Field(
        foreign_key="task_custom_field_definitions.id",
        index=True,
    )
    created_at: datetime = Field(default_factory=utcnow)
class TaskCustomFieldValue(TenantScoped, table=True):
    """Stored task-level values for bound custom fields.

    One row per (task, definition); absent rows mean the task falls back to
    the definition's default value.
    """

    __tablename__ = "task_custom_field_values"  # pyright: ignore[reportAssignmentType]
    __table_args__ = (
        UniqueConstraint(
            "task_id",
            "task_custom_field_definition_id",
            name="uq_task_custom_field_values_task_id_task_custom_field_definition_id",
        ),
    )

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    task_id: UUID = Field(foreign_key="tasks.id", index=True)
    task_custom_field_definition_id: UUID = Field(
        foreign_key="task_custom_field_definitions.id",
        index=True,
    )
    # JSON column: value shape depends on the definition's field_type.
    value: object | None = Field(default=None, sa_column=Column(JSON))
    created_at: datetime = Field(default_factory=utcnow)
    updated_at: datetime = Field(default_factory=utcnow)

View File

@@ -6,7 +6,7 @@ from datetime import datetime
from typing import Literal, Self
from uuid import UUID
from pydantic import AliasChoices, Field as PydanticField, model_validator
from pydantic import model_validator
from sqlmodel import Field, SQLModel
ApprovalStatus = Literal["pending", "approved", "rejected"]
@@ -48,19 +48,11 @@ class ApprovalCreate(ApprovalBase):
"""Payload for creating a new approval request."""
agent_id: UUID | None = None
# Back-compat + ergonomics: some clients send lead reasoning as a top-level
# field (`reasoning` / `lead_reasoning`) rather than nesting under payload.reason.
lead_reasoning: str | None = PydanticField(
default=None,
validation_alias=AliasChoices("lead_reasoning", "reasoning", "leadReasoning"),
)
lead_reasoning: str | None = None
@model_validator(mode="after")
def validate_lead_reasoning(self) -> Self:
"""Ensure each approval request includes explicit lead reasoning."""
if isinstance(self.lead_reasoning, str) and self.lead_reasoning.strip():
return self
payload = self.payload
if isinstance(payload, dict):
reason = payload.get("reason")
@@ -71,6 +63,13 @@ class ApprovalCreate(ApprovalBase):
nested_reason = decision.get("reason")
if isinstance(nested_reason, str) and nested_reason.strip():
return self
lead_reasoning = self.lead_reasoning
if isinstance(lead_reasoning, str) and lead_reasoning.strip():
self.payload = {
**(payload if isinstance(payload, dict) else {}),
"reason": lead_reasoning.strip(),
}
return self
raise ValueError(LEAD_REASONING_REQUIRED_ERROR)

View File

@@ -0,0 +1,366 @@
"""Schemas for task custom field metadata, board bindings, and payloads."""
from __future__ import annotations
import re
from datetime import date, datetime
from typing import Literal, Self
from urllib.parse import urlparse
from uuid import UUID
from pydantic import Field, field_validator, model_validator
from sqlmodel import SQLModel
from app.schemas.common import NonEmptyStr
RUNTIME_ANNOTATION_TYPES = (datetime, UUID, date)
TaskCustomFieldType = Literal[
"text",
"text_long",
"integer",
"decimal",
"boolean",
"date",
"date_time",
"url",
"json",
]
TaskCustomFieldUiVisibility = Literal["always", "if_set", "hidden"]
STRING_FIELD_TYPES: set[str] = {"text", "text_long", "date", "date_time", "url"}
TASK_CUSTOM_FIELD_TYPE_ALIASES: dict[str, TaskCustomFieldType] = {
"text": "text",
"text_long": "text_long",
"text (long)": "text_long",
"long_text": "text_long",
"integer": "integer",
"decimal": "decimal",
"boolean": "boolean",
"true/false": "boolean",
"date": "date",
"date_time": "date_time",
"date & time": "date_time",
"datetime": "date_time",
"url": "url",
"json": "json",
}
TASK_CUSTOM_FIELD_UI_VISIBILITY_ALIASES: dict[str, TaskCustomFieldUiVisibility] = {
"always": "always",
"if_set": "if_set",
"if set": "if_set",
"hidden": "hidden",
}
# Reusable alias for task payloads containing custom-field values.
TaskCustomFieldValues = dict[str, object | None]
class TaskCustomFieldDefinitionBase(SQLModel):
    """Shared custom field definition properties.

    Base schema reused by the create/read payloads. The validators below
    normalize individual fields; cross-field rules (e.g. regex vs type) live
    on the concrete subclasses.
    """

    field_key: str
    label: str | None = None
    field_type: TaskCustomFieldType = "text"
    ui_visibility: TaskCustomFieldUiVisibility = "always"
    validation_regex: str | None = None
    description: str | None = None
    required: bool = False
    default_value: object | None = None

    @field_validator("field_key", mode="before")
    @classmethod
    def normalize_field_key(cls, value: object) -> object:
        """Require a non-empty string field key, trimmed of surrounding whitespace.

        NOTE(review): keys are trimmed but NOT lowercased here, so keys that
        differ only by case remain distinct — confirm this is intended.
        """
        if not isinstance(value, str):
            raise ValueError("field_key must be a string")
        normalized = value.strip()
        if not normalized:
            raise ValueError("field_key is required")
        return normalized

    @field_validator("label", mode="before")
    @classmethod
    def normalize_label(cls, value: object) -> object:
        """Normalize labels to a trimmed representation when provided.

        ``None`` passes through (subclasses may default it to field_key);
        an all-whitespace label is rejected rather than silently dropped.
        """
        if value is None:
            return None
        if not isinstance(value, str):
            raise ValueError("label must be a string")
        normalized = value.strip()
        if not normalized:
            raise ValueError("label is required")
        return normalized

    @field_validator("field_type", mode="before")
    @classmethod
    def normalize_field_type(cls, value: object) -> object:
        """Normalize field type aliases (case-insensitive, legacy spellings)."""
        if not isinstance(value, str):
            raise ValueError("field_type must be a string")
        normalized = value.strip().lower()
        resolved = TASK_CUSTOM_FIELD_TYPE_ALIASES.get(normalized)
        if resolved is None:
            raise ValueError(
                "field_type must be one of: text, text_long, integer, decimal, "
                "boolean, date, date_time, url, json",
            )
        return resolved

    @field_validator("validation_regex", mode="before")
    @classmethod
    def normalize_validation_regex(cls, value: object) -> object:
        """Normalize and validate regex pattern syntax.

        An empty/whitespace-only pattern is treated the same as ``None``
        (no regex constraint).
        """
        if value is None:
            return None
        if not isinstance(value, str):
            raise ValueError("validation_regex must be a string")
        normalized = value.strip()
        if not normalized:
            return None
        try:
            # Compile eagerly so invalid patterns fail at definition time,
            # not when the first task value is validated.
            re.compile(normalized)
        except re.error as exc:
            raise ValueError(f"validation_regex is invalid: {exc}") from exc
        return normalized

    @field_validator("ui_visibility", mode="before")
    @classmethod
    def normalize_ui_visibility(cls, value: object) -> object:
        """Normalize UI visibility aliases (case-insensitive)."""
        if not isinstance(value, str):
            raise ValueError("ui_visibility must be a string")
        normalized = value.strip().lower()
        resolved = TASK_CUSTOM_FIELD_UI_VISIBILITY_ALIASES.get(normalized)
        if resolved is None:
            raise ValueError("ui_visibility must be one of: always, if_set, hidden")
        return resolved
class TaskCustomFieldDefinitionCreate(TaskCustomFieldDefinitionBase):
    """Payload for creating a task custom field definition.

    Requires at least one board binding. The label defaults to field_key
    when omitted, and regex validation is restricted to string-shaped types.
    """

    field_key: NonEmptyStr
    label: NonEmptyStr | None = None
    board_ids: list[UUID] = Field(min_length=1)

    @field_validator("board_ids")
    @classmethod
    def normalize_board_ids(cls, value: list[UUID]) -> list[UUID]:
        """Remove duplicates while preserving user-supplied order."""
        # dict.fromkeys keeps first occurrence order, unlike set().
        deduped = list(dict.fromkeys(value))
        if not deduped:
            raise ValueError("board_ids must include at least one board")
        return deduped

    @model_validator(mode="after")
    def default_label_to_field_key(self) -> Self:
        """Default labels to field_key when omitted by older clients."""
        if self.label is None:
            self.label = self.field_key
        return self

    @model_validator(mode="after")
    def validate_regex_field_type_combo(self) -> Self:
        """Restrict regex validation to string-compatible field types."""
        if self.validation_regex is not None and self.field_type not in STRING_FIELD_TYPES:
            raise ValueError(
                "validation_regex is only supported for string field types.",
            )
        return self
class TaskCustomFieldDefinitionUpdate(SQLModel):
    """Payload for editing an existing task custom field definition.

    All fields are optional (partial update); ``field_key`` is immutable
    after creation and explicitly rejected. Empty updates are rejected.
    """

    label: NonEmptyStr | None = None
    field_type: TaskCustomFieldType | None = None
    ui_visibility: TaskCustomFieldUiVisibility | None = None
    validation_regex: str | None = None
    description: str | None = None
    required: bool | None = None
    default_value: object | None = None
    board_ids: list[UUID] | None = None

    @field_validator("board_ids")
    @classmethod
    def normalize_board_ids(cls, value: list[UUID] | None) -> list[UUID] | None:
        """Normalize board bindings when provided in updates."""
        if value is None:
            return None
        # Keep first-occurrence order while dropping duplicates.
        deduped = list(dict.fromkeys(value))
        if not deduped:
            raise ValueError("board_ids must include at least one board")
        return deduped

    @field_validator("field_type", mode="before")
    @classmethod
    def normalize_optional_field_type(cls, value: object) -> object:
        """Normalize optional field type aliases (delegates to the base rule)."""
        if value is None:
            return None
        return TaskCustomFieldDefinitionBase.normalize_field_type(value)

    @field_validator("validation_regex", mode="before")
    @classmethod
    def normalize_optional_validation_regex(cls, value: object) -> object:
        """Normalize and validate optional regex pattern syntax."""
        if value is None:
            return None
        return TaskCustomFieldDefinitionBase.normalize_validation_regex(value)

    @field_validator("ui_visibility", mode="before")
    @classmethod
    def normalize_optional_ui_visibility(cls, value: object) -> object:
        """Normalize optional UI visibility aliases."""
        if value is None:
            return None
        return TaskCustomFieldDefinitionBase.normalize_ui_visibility(value)

    @model_validator(mode="before")
    @classmethod
    def reject_field_key_update(cls, value: object) -> object:
        """Disallow field_key updates after definition creation."""
        if isinstance(value, dict) and "field_key" in value:
            raise ValueError("field_key cannot be changed after creation.")
        return value

    @model_validator(mode="after")
    def reject_null_for_non_nullable_fields(self) -> Self:
        """Reject explicit null for non-nullable update fields.

        Distinguishes "field omitted" from "field sent as null" via
        model_fields_set, so omitted fields still mean "leave unchanged".
        """
        non_nullable_fields = ("label", "field_type", "ui_visibility", "required")
        invalid = [
            field_name
            for field_name in non_nullable_fields
            if field_name in self.model_fields_set and getattr(self, field_name) is None
        ]
        if invalid:
            raise ValueError(
                f"{', '.join(invalid)} cannot be null; omit the field to leave it unchanged",
            )
        return self

    @model_validator(mode="after")
    def require_some_update(self) -> Self:
        """Reject empty updates to avoid no-op requests."""
        if not self.model_fields_set:
            raise ValueError("At least one field is required")
        return self
class TaskCustomFieldDefinitionRead(TaskCustomFieldDefinitionBase):
    """Payload returned for custom field definitions.

    Narrows the base's optional fields: label/field_type/ui_visibility are
    always populated on read, and board bindings are included.
    """

    id: UUID
    organization_id: UUID
    label: str
    field_type: TaskCustomFieldType
    ui_visibility: TaskCustomFieldUiVisibility
    validation_regex: str | None = None
    board_ids: list[UUID] = Field(default_factory=list)
    created_at: datetime
    updated_at: datetime
class BoardTaskCustomFieldCreate(SQLModel):
    """Payload for binding a definition to a board."""

    # References an existing org-level TaskCustomFieldDefinition.
    task_custom_field_definition_id: UUID
class BoardTaskCustomFieldRead(SQLModel):
    """Payload returned when listing board-bound custom fields.

    Flattens the binding row (id/board_id/definition id) together with the
    referenced definition's attributes so clients need a single call.
    """

    id: UUID
    board_id: UUID
    task_custom_field_definition_id: UUID
    field_key: str
    label: str
    field_type: TaskCustomFieldType
    ui_visibility: TaskCustomFieldUiVisibility
    validation_regex: str | None
    description: str | None
    required: bool
    default_value: object | None
    created_at: datetime
class TaskCustomFieldValuesPayload(SQLModel):
    """Payload for setting all custom-field values at once."""

    # Full replacement map of {field_key: value}; defaults to empty.
    custom_field_values: TaskCustomFieldValues = Field(default_factory=dict)
def _parse_iso_datetime(value: str) -> datetime:
normalized = value.strip()
if normalized.endswith("Z"):
normalized = f"{normalized[:-1]}+00:00"
return datetime.fromisoformat(normalized)
def validate_custom_field_value(
*,
field_type: TaskCustomFieldType,
value: object | None,
validation_regex: str | None = None,
) -> None:
"""Validate a custom field value against field type and optional regex."""
if value is None:
return
if field_type in {"text", "text_long"}:
if not isinstance(value, str):
raise ValueError("must be a string")
elif field_type == "integer":
if not isinstance(value, int) or isinstance(value, bool):
raise ValueError("must be an integer")
elif field_type == "decimal":
if (not isinstance(value, (int, float))) or isinstance(value, bool):
raise ValueError("must be a decimal number")
elif field_type == "boolean":
if not isinstance(value, bool):
raise ValueError("must be true or false")
elif field_type == "date":
if not isinstance(value, str):
raise ValueError("must be an ISO date string (YYYY-MM-DD)")
try:
date.fromisoformat(value)
except ValueError as exc:
raise ValueError("must be an ISO date string (YYYY-MM-DD)") from exc
elif field_type == "date_time":
if not isinstance(value, str):
raise ValueError("must be an ISO datetime string")
try:
_parse_iso_datetime(value)
except ValueError as exc:
raise ValueError("must be an ISO datetime string") from exc
elif field_type == "url":
if not isinstance(value, str):
raise ValueError("must be a URL string")
parsed = urlparse(value)
if parsed.scheme not in {"http", "https"} or not parsed.netloc:
raise ValueError("must be a valid http/https URL")
elif field_type == "json":
if not isinstance(value, (dict, list)):
raise ValueError("must be a JSON object or array")
if validation_regex is not None and field_type in STRING_FIELD_TYPES:
if not isinstance(value, str):
raise ValueError("must be a string for regex validation")
if re.fullmatch(validation_regex, value) is None:
raise ValueError("does not match validation_regex")
def validate_custom_field_definition(
    *,
    field_type: TaskCustomFieldType,
    validation_regex: str | None,
    default_value: object | None,
) -> None:
    """Validate field definition constraints and default-value compatibility."""
    # Regex constraints only make sense for string-shaped values.
    regex_allowed = field_type in STRING_FIELD_TYPES
    if validation_regex is not None and not regex_allowed:
        raise ValueError("validation_regex is only supported for string field types.")
    # The default must itself satisfy the declared type (and regex, if any).
    validate_custom_field_value(
        field_type=field_type,
        value=default_value,
        validation_regex=validation_regex,
    )

View File

@@ -11,6 +11,7 @@ from sqlmodel import Field, SQLModel
from app.schemas.common import NonEmptyStr
from app.schemas.tags import TagRef
from app.schemas.task_custom_fields import TaskCustomFieldValues
TaskStatus = Literal["inbox", "in_progress", "review", "done"]
STATUS_REQUIRED_ERROR = "status is required"
@@ -36,6 +37,7 @@ class TaskCreate(TaskBase):
"""Payload for creating a task."""
created_by_user_id: UUID | None = None
custom_field_values: TaskCustomFieldValues = Field(default_factory=dict)
class TaskUpdate(SQLModel):
@@ -49,6 +51,7 @@ class TaskUpdate(SQLModel):
assigned_agent_id: UUID | None = None
depends_on_task_ids: list[UUID] | None = None
tag_ids: list[UUID] | None = None
custom_field_values: TaskCustomFieldValues | None = None
comment: NonEmptyStr | None = None
@field_validator("comment", mode="before")
@@ -81,6 +84,7 @@ class TaskRead(TaskBase):
blocked_by_task_ids: list[UUID] = Field(default_factory=list)
is_blocked: bool = False
tags: list[TagRef] = Field(default_factory=list)
custom_field_values: TaskCustomFieldValues | None = None
class TaskCommentCreate(SQLModel):

View File

@@ -22,6 +22,11 @@ from app.core.logging import TRACE_LEVEL, get_logger
PROTOCOL_VERSION = 3
logger = get_logger(__name__)
GATEWAY_OPERATOR_SCOPES = (
"operator.admin",
"operator.approvals",
"operator.pairing",
)
# NOTE: These are the base gateway methods from the OpenClaw gateway repo.
# The gateway can expose additional methods at runtime via channel plugins.
@@ -229,6 +234,8 @@ def _build_connect_params(config: GatewayConfig) -> dict[str, Any]:
params: dict[str, Any] = {
"minProtocol": PROTOCOL_VERSION,
"maxProtocol": PROTOCOL_VERSION,
"role": "operator",
"scopes": list(GATEWAY_OPERATOR_SCOPES),
"client": {
"id": "gateway-client",
"version": "1.0.0",

View File

@@ -0,0 +1,3 @@
"""Webhook queueing and dispatch utilities."""
__all__ = ["dispatch", "queue", "scheduler"]

View File

@@ -0,0 +1,334 @@
"""Webhook dispatch worker routines."""
from __future__ import annotations
import asyncio
import time
from sqlmodel.ext.asyncio.session import AsyncSession
from uuid import UUID
from app.core.config import settings
from app.core.logging import get_logger
from app.db.session import async_session_maker
from app.models.agents import Agent
from app.models.board_webhook_payloads import BoardWebhookPayload
from app.models.board_webhooks import BoardWebhook
from app.models.boards import Board
from app.services.openclaw.gateway_dispatch import GatewayDispatchService
from app.services.webhooks.queue import (
QueuedWebhookDelivery,
dequeue_webhook_delivery,
requeue_if_failed,
)
logger = get_logger(__name__)
# GitHub event families that fire on every CI run and are mostly noise.
_ROUTINE_GITHUB_EVENTS = frozenset({"check_run", "check_suite", "workflow_run"})
# Conclusions treated as "nothing went wrong"; None covers payloads that
# carry no conclusion at all.
_SUCCESS_GITHUB_CONCLUSIONS = frozenset({None, "success", "neutral", "skipped"})
# Consider these actionable enough to page the lead / surface in task threads.
_ACTIONABLE_GITHUB_CONCLUSIONS = frozenset(
    {
        "failure",
        "cancelled",
        "timed_out",
        "action_required",
        "stale",
        "startup_failure",
    },
)
def _as_dict(value: object) -> dict[str, object] | None:
if isinstance(value, dict):
# Keep only string keys; payloads can include non-str keys in edge cases.
normalized: dict[str, object] = {}
for k, v in value.items():
if isinstance(k, str):
normalized[k] = v
return normalized
return None
def _str_or_none(value: object) -> str | None:
if value is None:
return None
if isinstance(value, str):
return value
return str(value)
def _extract_github_conclusion(payload: dict[str, object], *, key: str) -> str | None:
    """Read ``payload[key]["conclusion"]`` as a string, or None when absent."""
    container = _as_dict(payload.get(key))
    return _str_or_none(container.get("conclusion")) if container else None
def _extract_github_status(payload: dict[str, object], *, key: str) -> str | None:
    """Read ``payload[key]["status"]`` as a string, or None when absent."""
    container = _as_dict(payload.get(key))
    return _str_or_none(container.get("status")) if container else None
def _should_suppress_routine_delivery(
    *,
    payload_event: str | None,
    payload_value: object,
) -> bool:
    """Return True if this delivery is routine noise and should not notify leads.
    This intentionally only targets high-volume GitHub CI telemetry events.
    We still persist the webhook payload + board memory entry for audit/debug.
    """
    # Feature flag: when disabled, nothing is ever suppressed.
    if not settings.webhook_dispatch_suppress_routine_events:
        return False
    # Only the three high-volume CI event types are ever candidates.
    if payload_event not in _ROUTINE_GITHUB_EVENTS:
        return False
    payload = _as_dict(payload_value)
    if payload is None:
        # Can't reason about a non-dict payload — deliver rather than guess.
        return False
    action = _str_or_none(payload.get("action"))
    # If GitHub hasn't marked it completed, it's almost always noise.
    if action and action != "completed":
        return True
    if payload_event == "workflow_run":
        status = _extract_github_status(payload, key="workflow_run")
        if status and status != "completed":
            return True
        conclusion = _extract_github_conclusion(payload, key="workflow_run")
    elif payload_event == "check_run":
        status = _extract_github_status(payload, key="check_run")
        if status and status != "completed":
            return True
        conclusion = _extract_github_conclusion(payload, key="check_run")
    else:  # check_suite
        status = _extract_github_status(payload, key="check_suite")
        if status and status != "completed":
            return True
        conclusion = _extract_github_conclusion(payload, key="check_suite")
    # Success-like (or missing) conclusions are suppressed outright.
    if conclusion in _SUCCESS_GITHUB_CONCLUSIONS:
        return True
    # Only page on explicitly non-success conclusions: an unrecognized
    # conclusion string falls through to suppression here.
    return conclusion not in _ACTIONABLE_GITHUB_CONCLUSIONS
def _build_payload_preview(payload_value: object) -> str:
if isinstance(payload_value, str):
return payload_value
try:
import json
return json.dumps(payload_value, indent=2, ensure_ascii=True)
except TypeError:
return str(payload_value)
def _payload_preview(payload_value: object) -> str:
    """Build a payload preview capped at 1600 chars, ellipsized if truncated."""
    text = _build_payload_preview(payload_value)
    return text if len(text) <= 1600 else f"{text[:1597]}..."
def _webhook_message(
    *,
    board: Board,
    webhook: BoardWebhook,
    payload: BoardWebhookPayload,
) -> str:
    """Compose the lead-facing notification text for one webhook payload."""
    body_preview = _payload_preview(payload.payload)
    # Assemble the exact message the lead agent receives; each part already
    # carries its trailing newline(s).
    parts = [
        "WEBHOOK EVENT RECEIVED\n",
        f"Board: {board.name}\n",
        f"Webhook ID: {webhook.id}\n",
        f"Payload ID: {payload.id}\n",
        f"Instruction: {webhook.description}\n\n",
        "Take action:\n",
        "1) Triage this payload against the webhook instruction.\n",
        "2) Create/update tasks as needed.\n",
        f"3) Reference payload ID {payload.id} in task descriptions.\n\n",
        "Payload preview:\n",
        f"{body_preview}\n\n",
        "To inspect board memory entries:\n",
        f"GET /api/v1/agent/boards/{board.id}/memory?is_chat=false",
    ]
    return "".join(parts)
async def _notify_lead(
    *,
    session: AsyncSession,
    board: Board,
    webhook: BoardWebhook,
    payload: BoardWebhookPayload,
) -> None:
    """Send the webhook notification message to the board's lead agent.

    Silently no-ops when the board has no lead with an OpenClaw session, or
    when no gateway config is available for the board.
    """
    lead = await Agent.objects.filter_by(board_id=board.id, is_board_lead=True).first(session)
    if lead is None or not lead.openclaw_session_id:
        return
    dispatch = GatewayDispatchService(session)
    config = await dispatch.optional_gateway_config_for_board(board)
    if config is None:
        return
    message = _webhook_message(board=board, webhook=webhook, payload=payload)
    # NOTE(review): deliver=False presumably queues the message on the agent
    # session instead of pushing it immediately — confirm against
    # GatewayDispatchService.try_send_agent_message.
    await dispatch.try_send_agent_message(
        session_key=lead.openclaw_session_id,
        config=config,
        agent_name=lead.name,
        message=message,
        deliver=False,
    )
async def _load_webhook_payload(
    *,
    session: AsyncSession,
    payload_id: UUID,
    webhook_id: UUID,
    board_id: UUID,
) -> tuple[Board, BoardWebhook, BoardWebhookPayload] | None:
    """Load and cross-validate the board, webhook, and payload rows.

    Returns None (after logging a warning) when any row is missing or the
    rows do not reference each other consistently; the caller treats that as
    a permanently undeliverable queue item.
    """
    payload = await session.get(BoardWebhookPayload, payload_id)
    if payload is None:
        logger.warning(
            "webhook.queue.payload_missing",
            extra={
                "payload_id": str(payload_id),
                "webhook_id": str(webhook_id),
                "board_id": str(board_id),
            },
        )
        return None
    if payload.board_id != board_id or payload.webhook_id != webhook_id:
        logger.warning(
            "webhook.queue.payload_mismatch",
            extra={
                "payload_id": str(payload_id),
                "payload_webhook_id": str(payload.webhook_id),
                "payload_board_id": str(payload.board_id),
            },
        )
        return None
    board = await Board.objects.by_id(board_id).first(session)
    if board is None:
        logger.warning(
            "webhook.queue.board_missing",
            extra={"board_id": str(board_id), "payload_id": str(payload_id)},
        )
        return None
    webhook = await session.get(BoardWebhook, webhook_id)
    if webhook is None:
        logger.warning(
            "webhook.queue.webhook_missing",
            extra={"webhook_id": str(webhook_id), "board_id": str(board_id)},
        )
        return None
    if webhook.board_id != board_id:
        logger.warning(
            "webhook.queue.webhook_board_mismatch",
            extra={
                "webhook_id": str(webhook_id),
                # Log the webhook's actual board so the mismatch is visible.
                # Previously this logged payload.board_id, which was already
                # validated above to equal board_id and so never showed the
                # diverging value.
                "webhook_board_id": str(webhook.board_id),
                "expected_board_id": str(board_id),
            },
        )
        return None
    return board, webhook, payload
async def _process_single_item(item: QueuedWebhookDelivery) -> None:
    """Handle one queued delivery: validate rows, drop noise, notify the lead."""
    async with async_session_maker() as session:
        loaded = await _load_webhook_payload(
            session=session,
            payload_id=item.payload_id,
            webhook_id=item.webhook_id,
            board_id=item.board_id,
        )
        if loaded is None:
            # Missing/mismatched rows were already logged by the loader;
            # the item is dropped without retry.
            return
        board, webhook, payload = loaded
        # Routine CI telemetry (e.g. successful GitHub check runs) is logged
        # but never forwarded to the lead.
        if _should_suppress_routine_delivery(
            payload_event=item.payload_event,
            payload_value=payload.payload,
        ):
            logger.info(
                "webhook.dispatch.suppressed_routine",
                extra={
                    "payload_id": str(item.payload_id),
                    "webhook_id": str(item.webhook_id),
                    "board_id": str(item.board_id),
                    "event": item.payload_event,
                    "attempt": item.attempts,
                },
            )
            return
        await _notify_lead(session=session, board=board, webhook=webhook, payload=payload)
        await session.commit()
async def flush_webhook_delivery_queue() -> None:
    """Consume queued webhook events and notify board leads in a throttled batch.

    Drains the Redis queue until empty.  Per-item failures are logged and
    routed through requeue_if_failed (capped retries); malformed queue
    entries raise out of dequeue and are skipped (they were already popped).
    """
    processed = 0
    while True:
        try:
            item = dequeue_webhook_delivery()
        except Exception:
            # The bad entry was already popped by dequeue, so continuing
            # moves on to the next one.
            # NOTE(review): if Redis itself is persistently unavailable this
            # loop spins without backoff until the connection recovers —
            # confirm that is acceptable for the scheduler cadence.
            logger.exception("webhook.dispatch.dequeue_failed")
            continue
        if item is None:
            break
        try:
            await _process_single_item(item)
            processed += 1
            logger.info(
                "webhook.dispatch.success",
                extra={
                    "payload_id": str(item.payload_id),
                    "webhook_id": str(item.webhook_id),
                    "board_id": str(item.board_id),
                    "attempt": item.attempts,
                },
            )
        except Exception as exc:
            logger.exception(
                "webhook.dispatch.failed",
                extra={
                    "payload_id": str(item.payload_id),
                    "webhook_id": str(item.webhook_id),
                    "board_id": str(item.board_id),
                    "attempt": item.attempts,
                    "error": str(exc),
                },
            )
            requeue_if_failed(item)
        # NOTE(review): time.sleep blocks the event loop; harmless today only
        # because this coroutine is the sole task under asyncio.run in the RQ
        # worker — switch to asyncio.sleep (and update the tests that patch
        # dispatch.time.sleep) if this ever runs alongside other tasks.
        time.sleep(settings.webhook_dispatch_throttle_seconds)
    logger.info("webhook.dispatch.batch_complete", extra={"count": processed})
def run_flush_webhook_delivery_queue() -> None:
    """RQ entrypoint for running the async queue flush from worker jobs."""
    logger.info(
        "webhook.dispatch.batch_started",
        extra={"throttle_seconds": settings.webhook_dispatch_throttle_seconds},
    )
    started_at = time.time()
    asyncio.run(flush_webhook_delivery_queue())
    # Wall-clock duration of the whole batch, in whole milliseconds.
    duration_ms = int((time.time() - started_at) * 1000)
    logger.info("webhook.dispatch.batch_finished", extra={"duration_ms": duration_ms})

View File

@@ -0,0 +1,136 @@
"""Webhook queue persistence and delivery helpers."""
from __future__ import annotations
import json
from dataclasses import dataclass
from datetime import datetime
from typing import Any
from uuid import UUID
from typing import cast
import redis
from app.core.config import settings
from app.core.logging import get_logger
logger = get_logger(__name__)
@dataclass(frozen=True)
class QueuedWebhookDelivery:
"""Payload metadata stored for deferred webhook lead dispatch."""
board_id: UUID
webhook_id: UUID
payload_id: UUID
payload_event: str | None
received_at: datetime
attempts: int = 0
def to_json(self) -> str:
return json.dumps(
{
"board_id": str(self.board_id),
"webhook_id": str(self.webhook_id),
"payload_id": str(self.payload_id),
"payload_event": self.payload_event,
"received_at": self.received_at.isoformat(),
"attempts": self.attempts,
},
sort_keys=True,
)
def _redis_client() -> redis.Redis:
    """Create a Redis client for the webhook queue from the configured URL."""
    # NOTE(review): this builds a fresh client (and connection pool) on every
    # call; fine at the current call volume, but consider caching the client
    # if queue traffic grows.
    return redis.Redis.from_url(settings.webhook_redis_url)
def enqueue_webhook_delivery(payload: QueuedWebhookDelivery) -> bool:
    """Persist webhook metadata in a Redis queue for batch dispatch.

    Returns True on success; enqueue failures are logged and reported as
    False rather than raised, so webhook ingestion is never blocked.
    """
    log_context = {
        "board_id": str(payload.board_id),
        "webhook_id": str(payload.webhook_id),
        "payload_id": str(payload.payload_id),
    }
    try:
        _redis_client().lpush(settings.webhook_queue_name, payload.to_json())
        logger.info(
            "webhook.queue.enqueued",
            extra={**log_context, "attempt": payload.attempts},
        )
        return True
    except Exception as exc:
        logger.warning(
            "webhook.queue.enqueue_failed",
            extra={**log_context, "error": str(exc)},
        )
        return False
def dequeue_webhook_delivery() -> QueuedWebhookDelivery | None:
    """Pop one queued webhook delivery payload.

    Returns None when the queue is empty.  Raises (after logging) when a
    popped entry cannot be parsed — at that point the entry has already been
    removed from Redis, so a poison message is consumed, not retried.
    """
    client = _redis_client()
    raw = cast(str | bytes | None, client.rpop(settings.webhook_queue_name))
    if raw is None:
        return None
    if isinstance(raw, bytes):
        # redis-py returns bytes unless decode_responses is enabled.
        raw = raw.decode("utf-8")
    try:
        payload: dict[str, Any] = json.loads(raw)
        event = payload.get("payload_event")
        if event is not None:
            event = str(event)
        return QueuedWebhookDelivery(
            board_id=UUID(payload["board_id"]),
            webhook_id=UUID(payload["webhook_id"]),
            payload_id=UUID(payload["payload_id"]),
            payload_event=event,
            received_at=datetime.fromisoformat(payload["received_at"]),
            attempts=int(payload.get("attempts", 0)),
        )
    except Exception as exc:
        logger.error(
            "webhook.queue.dequeue_failed",
            extra={"raw_payload": str(raw), "error": str(exc)},
        )
        raise
def _requeue_with_attempt(payload: QueuedWebhookDelivery) -> None:
    """Re-enqueue ``payload`` with its attempt counter incremented by one."""
    # The dataclass is frozen, so build a bumped copy field by field.
    bumped = QueuedWebhookDelivery(
        board_id=payload.board_id,
        webhook_id=payload.webhook_id,
        payload_id=payload.payload_id,
        payload_event=payload.payload_event,
        received_at=payload.received_at,
        attempts=payload.attempts + 1,
    )
    enqueue_webhook_delivery(bumped)
def requeue_if_failed(payload: QueuedWebhookDelivery) -> bool:
    """Requeue payload delivery with capped retries.

    Returns True if requeued.
    """
    if payload.attempts < settings.webhook_dispatch_max_retries:
        _requeue_with_attempt(payload)
        return True
    # Cap reached: drop the delivery permanently, leaving an audit log line.
    logger.warning(
        "webhook.queue.drop_failed_delivery",
        extra={
            "board_id": str(payload.board_id),
            "webhook_id": str(payload.webhook_id),
            "payload_id": str(payload.payload_id),
            "attempts": payload.attempts,
        },
    )
    return False

View File

@@ -0,0 +1,36 @@
"""Webhook dispatch scheduler bootstrap for rq-scheduler."""
from __future__ import annotations
from datetime import datetime, timedelta, timezone
from redis import Redis
from rq_scheduler import Scheduler # type: ignore[import-untyped]
from app.core.config import settings
from app.services.webhooks import dispatch
def bootstrap_webhook_dispatch_schedule(interval_seconds: int | None = None) -> None:
    """Register a recurring queue-flush job and keep it idempotent.

    interval_seconds overrides the configured scheduler interval when given;
    None falls back to settings.webhook_dispatch_schedule_interval_seconds.
    """
    connection = Redis.from_url(settings.webhook_redis_url)
    scheduler = Scheduler(queue_name=settings.webhook_queue_name, connection=connection)
    # Idempotency: cancel any previously registered copy of this job (same
    # fixed id) before scheduling a fresh one.
    for job in scheduler.get_jobs():
        if job.id == settings.webhook_dispatch_schedule_id:
            scheduler.cancel(job)
    effective_interval_seconds = (
        settings.webhook_dispatch_schedule_interval_seconds
        if interval_seconds is None
        else interval_seconds
    )
    # First run is delayed 5s to let the worker finish booting.
    # NOTE(review): repeat=None means "repeat indefinitely" in rq-scheduler —
    # confirm against the installed rq-scheduler version's docs.
    scheduler.schedule(
        datetime.now(tz=timezone.utc) + timedelta(seconds=5),
        func=dispatch.run_flush_webhook_delivery_queue,
        interval=effective_interval_seconds,
        repeat=None,
        id=settings.webhook_dispatch_schedule_id,
        queue_name=settings.webhook_queue_name,
    )

View File

@@ -1,26 +0,0 @@
"""merge heads for activity_events index
Revision ID: 836cf8009001
Revises: b05c7b628636, fa6e83f8d9a1
Create Date: 2026-02-13 10:57:21.395382
"""
from __future__ import annotations
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '836cf8009001'
down_revision = ('b05c7b628636', 'fa6e83f8d9a1')
branch_labels = None
depends_on = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -1,7 +1,7 @@
"""add activity_events event_type created_at index
Revision ID: b05c7b628636
Revises: bbd5bbb26d97
Revises: b6f4c7d9e1a2
Create Date: 2026-02-12 09:54:32.359256
"""
@@ -13,7 +13,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b05c7b628636'
down_revision = 'bbd5bbb26d97'
down_revision = 'b6f4c7d9e1a2'
branch_labels = None
depends_on = None

View File

@@ -0,0 +1,141 @@
"""Add task custom field tables.
Revision ID: b6f4c7d9e1a2
Revises: 1a7b2c3d4e5f
Create Date: 2026-02-13 00:20:00.000000
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "b6f4c7d9e1a2"
down_revision = "1a7b2c3d4e5f"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create task custom-field definition, binding, and value tables."""
    # Org-scoped field definitions: field_key is unique per organization and
    # both field_type and ui_visibility are constrained to known values.
    op.create_table(
        "task_custom_field_definitions",
        sa.Column("id", sa.Uuid(), nullable=False),
        sa.Column("organization_id", sa.Uuid(), nullable=False),
        sa.Column("field_key", sa.String(), nullable=False),
        sa.Column("label", sa.String(), nullable=False),
        sa.Column(
            "field_type",
            sa.String(),
            nullable=False,
            server_default=sa.text("'text'"),
        ),
        sa.Column(
            "ui_visibility",
            sa.String(),
            nullable=False,
            server_default=sa.text("'always'"),
        ),
        sa.Column("validation_regex", sa.String(), nullable=True),
        sa.Column("description", sa.String(), nullable=True),
        sa.Column("required", sa.Boolean(), nullable=False),
        sa.Column("default_value", sa.JSON(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "organization_id",
            "field_key",
            name="uq_tcf_def_org_key",
        ),
        sa.CheckConstraint(
            "field_type IN "
            "('text','text_long','integer','decimal','boolean','date','date_time','url','json')",
            name="ck_tcf_def_field_type",
        ),
        sa.CheckConstraint(
            "ui_visibility IN ('always','if_set','hidden')",
            name="ck_tcf_def_ui_visibility",
        ),
    )
    op.create_index(
        "ix_task_custom_field_definitions_organization_id",
        "task_custom_field_definitions",
        ["organization_id"],
    )
    op.create_index(
        "ix_task_custom_field_definitions_field_key",
        "task_custom_field_definitions",
        ["field_key"],
    )
    # Board bindings: which definitions are enabled on which board, at most
    # once per (board, definition) pair.
    op.create_table(
        "board_task_custom_fields",
        sa.Column("id", sa.Uuid(), nullable=False),
        sa.Column("board_id", sa.Uuid(), nullable=False),
        sa.Column("task_custom_field_definition_id", sa.Uuid(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["board_id"], ["boards.id"]),
        sa.ForeignKeyConstraint(
            ["task_custom_field_definition_id"],
            ["task_custom_field_definitions.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "board_id",
            "task_custom_field_definition_id",
            name="uq_board_tcf_binding",
        ),
    )
    op.create_index(
        "ix_board_task_custom_fields_board_id",
        "board_task_custom_fields",
        ["board_id"],
    )
    op.create_index(
        "ix_board_task_custom_fields_task_custom_field_definition_id",
        "board_task_custom_fields",
        ["task_custom_field_definition_id"],
    )
    # Per-task values: one JSON value per (task, definition) pair.
    op.create_table(
        "task_custom_field_values",
        sa.Column("id", sa.Uuid(), nullable=False),
        sa.Column("task_id", sa.Uuid(), nullable=False),
        sa.Column("task_custom_field_definition_id", sa.Uuid(), nullable=False),
        sa.Column("value", sa.JSON(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["task_id"], ["tasks.id"]),
        sa.ForeignKeyConstraint(
            ["task_custom_field_definition_id"],
            ["task_custom_field_definitions.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "task_id",
            "task_custom_field_definition_id",
            name="uq_tcf_values_task_def",
        ),
    )
    op.create_index(
        "ix_task_custom_field_values_task_id",
        "task_custom_field_values",
        ["task_id"],
    )
    op.create_index(
        "ix_task_custom_field_values_task_custom_field_definition_id",
        "task_custom_field_values",
        ["task_custom_field_definition_id"],
    )
def downgrade() -> None:
    """Drop task custom field tables."""
    # Reverse dependency order: values and bindings reference definitions,
    # so the definitions table must be dropped last.
    op.drop_table("task_custom_field_values")
    op.drop_table("board_task_custom_fields")
    op.drop_table("task_custom_field_definitions")

View File

@@ -1,26 +0,0 @@
"""merge heads
Revision ID: bbd5bbb26d97
Revises: 99cd6df95f85, b4338be78eec
Create Date: 2026-02-12 09:54:21.149702
"""
from __future__ import annotations
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bbd5bbb26d97'
down_revision = ('99cd6df95f85', 'b4338be78eec')
branch_labels = None
depends_on = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -1,26 +0,0 @@
"""merge heads after board lead rule
Revision ID: d3ca36cf31a1
Revises: 1a7b2c3d4e5f, 836cf8009001
Create Date: 2026-02-13 11:02:04.893298
"""
from __future__ import annotations
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd3ca36cf31a1'
down_revision = ('1a7b2c3d4e5f', '836cf8009001')
branch_labels = None
depends_on = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -25,6 +25,9 @@ dependencies = [
"sse-starlette==3.2.0",
"uvicorn[standard]==0.40.0",
"websockets==16.0",
"redis==6.3.0",
"rq==2.6.0",
"rq-scheduler==0.14.0",
]
[project.optional-dependencies]

View File

@@ -43,7 +43,9 @@ def main() -> int:
return 1
if len(heads) > 1 and not allow_multiple_heads:
print("ERROR: multiple Alembic heads detected (set ALLOW_MULTIPLE_HEADS=true only for intentional merge windows)")
print(
"ERROR: multiple Alembic heads detected (set ALLOW_MULTIPLE_HEADS=true only for intentional merge windows)"
)
for h in heads:
print(f" - {h}")
return 1

View File

@@ -38,17 +38,6 @@ def test_approval_create_requires_lead_reasoning() -> None:
)
def test_approval_create_accepts_top_level_reasoning_alias() -> None:
model = ApprovalCreate.model_validate(
{
"action_type": "task.update",
"confidence": 80,
"reasoning": "Lead says OK.",
},
)
assert model.lead_reasoning == "Lead says OK."
def test_approval_create_accepts_nested_decision_reason() -> None:
model = ApprovalCreate.model_validate(
{
@@ -69,3 +58,16 @@ def test_approval_create_accepts_float_confidence() -> None:
},
)
assert model.confidence == 88.75
def test_approval_create_accepts_top_level_lead_reasoning() -> None:
model = ApprovalCreate.model_validate(
{
"action_type": "task.update",
"confidence": 80,
"lead_reasoning": "Need manual review before changing task status.",
},
)
assert model.payload == {
"reason": "Need manual review before changing task status.",
}

View File

@@ -23,6 +23,7 @@ from app.models.board_webhooks import BoardWebhook
from app.models.boards import Board
from app.models.gateways import Gateway
from app.models.organizations import Organization
from app.services.webhooks.queue import QueuedWebhookDelivery
async def _make_engine() -> AsyncEngine:
@@ -112,7 +113,7 @@ async def _seed_webhook(
@pytest.mark.asyncio
async def test_ingest_board_webhook_stores_payload_and_notifies_lead(
async def test_ingest_board_webhook_stores_payload_and_enqueues_for_lead_dispatch(
monkeypatch: pytest.MonkeyPatch,
) -> None:
engine = await _make_engine()
@@ -122,16 +123,23 @@ async def test_ingest_board_webhook_stores_payload_and_notifies_lead(
expire_on_commit=False,
)
app = _build_test_app(session_maker)
enqueued: list[dict[str, object]] = []
sent_messages: list[dict[str, str]] = []
async with session_maker() as session:
board, webhook = await _seed_webhook(session, enabled=True)
async def _fake_optional_gateway_config_for_board(
self: board_webhooks.GatewayDispatchService,
_board: Board,
) -> object:
return object()
def _fake_enqueue(payload: QueuedWebhookDelivery) -> bool:
enqueued.append(
{
"board_id": str(payload.board_id),
"webhook_id": str(payload.webhook_id),
"payload_id": str(payload.payload_id),
"attempts": payload.attempts,
"event": payload.payload_event,
},
)
return True
async def _fake_try_send_agent_message(
self: board_webhooks.GatewayDispatchService,
@@ -145,7 +153,7 @@ async def test_ingest_board_webhook_stores_payload_and_notifies_lead(
del self, config, deliver
sent_messages.append(
{
"session_key": session_key,
"session_id": session_key,
"agent_name": agent_name,
"message": message,
},
@@ -153,9 +161,9 @@ async def test_ingest_board_webhook_stores_payload_and_notifies_lead(
return None
monkeypatch.setattr(
board_webhooks.GatewayDispatchService,
"optional_gateway_config_for_board",
_fake_optional_gateway_config_for_board,
board_webhooks,
"enqueue_webhook_delivery",
_fake_enqueue,
)
monkeypatch.setattr(
board_webhooks.GatewayDispatchService,
@@ -204,11 +212,12 @@ async def test_ingest_board_webhook_stores_payload_and_notifies_lead(
assert f"payload:{payload_id}" in memory_items[0].tags
assert f"Payload ID: {payload_id}" in memory_items[0].content
assert len(sent_messages) == 1
assert sent_messages[0]["session_key"] == "lead:session:key"
assert "WEBHOOK EVENT RECEIVED" in sent_messages[0]["message"]
assert str(payload_id) in sent_messages[0]["message"]
assert webhook.description in sent_messages[0]["message"]
assert len(enqueued) == 1
assert enqueued[0]["board_id"] == str(board.id)
assert enqueued[0]["webhook_id"] == str(webhook.id)
assert enqueued[0]["payload_id"] == str(payload_id)
assert len(sent_messages) == 0
finally:
await engine.dispose()

View File

@@ -0,0 +1,24 @@
from __future__ import annotations
from app.services.openclaw.gateway_rpc import (
GATEWAY_OPERATOR_SCOPES,
GatewayConfig,
_build_connect_params,
)
def test_build_connect_params_sets_explicit_operator_role_and_scopes() -> None:
    """Without a token, connect params carry role + scopes and no auth block."""
    params = _build_connect_params(GatewayConfig(url="ws://gateway.example/ws"))
    assert params["scopes"] == list(GATEWAY_OPERATOR_SCOPES)
    assert params["role"] == "operator"
    assert "auth" not in params
def test_build_connect_params_includes_auth_token_when_provided() -> None:
    """A configured token is forwarded via the ``auth`` connect parameter."""
    config = GatewayConfig(url="ws://gateway.example/ws", token="secret-token")
    params = _build_connect_params(config)
    assert params["scopes"] == list(GATEWAY_OPERATOR_SCOPES)
    assert params["auth"] == {"token": "secret-token"}

View File

@@ -0,0 +1,202 @@
# ruff: noqa: INP001
from __future__ import annotations
from uuid import uuid4
import pytest
from fastapi import HTTPException
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import SQLModel, col, select
from sqlmodel.ext.asyncio.session import AsyncSession
from app.api.deps import ActorContext
from app.api.tasks import _TaskUpdateInput, _apply_lead_task_update
from app.models.agents import Agent
from app.models.boards import Board
from app.models.organizations import Organization
from app.models.task_dependencies import TaskDependency
from app.models.tasks import Task
async def _make_engine() -> AsyncEngine:
    """Create an in-memory SQLite engine with all SQLModel tables created."""
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    async with engine.connect() as conn:
        async with conn.begin():
            await conn.run_sync(SQLModel.metadata.create_all)
    return engine
async def _make_session(engine: AsyncEngine) -> AsyncSession:
    """Open a session that keeps ORM objects usable after commit."""
    return AsyncSession(engine, expire_on_commit=False)
@pytest.mark.asyncio
async def test_lead_update_rejects_assignment_change_when_task_blocked() -> None:
    """A lead may not reassign a task while an incomplete dependency blocks it."""
    engine = await _make_engine()
    try:
        async with await _make_session(engine) as session:
            org_id = uuid4()
            board_id = uuid4()
            lead_id = uuid4()
            worker_id = uuid4()
            dep_id = uuid4()
            task_id = uuid4()
            # Seed: org/board, lead + worker agents, a blocking dependency
            # task, the task under test (in review), and the dependency edge.
            session.add(Organization(id=org_id, name="org"))
            session.add(Board(id=board_id, organization_id=org_id, name="b", slug="b"))
            session.add(
                Agent(
                    id=lead_id,
                    name="Lead",
                    board_id=board_id,
                    gateway_id=uuid4(),
                    is_board_lead=True,
                    openclaw_session_id="agent:lead:session",
                ),
            )
            session.add(
                Agent(
                    id=worker_id,
                    name="Worker",
                    board_id=board_id,
                    gateway_id=uuid4(),
                    is_board_lead=False,
                    openclaw_session_id="agent:worker:session",
                ),
            )
            session.add(Task(id=dep_id, board_id=board_id, title="dep", description=None))
            session.add(
                Task(
                    id=task_id,
                    board_id=board_id,
                    title="t",
                    description=None,
                    status="review",
                    assigned_agent_id=None,
                ),
            )
            session.add(
                TaskDependency(
                    board_id=board_id,
                    task_id=task_id,
                    depends_on_task_id=dep_id,
                ),
            )
            await session.commit()
            lead = (await session.exec(select(Agent).where(col(Agent.id) == lead_id))).first()
            task = (await session.exec(select(Task).where(col(Task.id) == task_id))).first()
            assert lead is not None
            assert task is not None
            # Attempt an assignment-only change (no status transition).
            update = _TaskUpdateInput(
                task=task,
                actor=ActorContext(actor_type="agent", agent=lead),
                board_id=board_id,
                previous_status=task.status,
                previous_assigned=task.assigned_agent_id,
                status_requested=False,
                updates={"assigned_agent_id": worker_id},
                comment=None,
                depends_on_task_ids=None,
                tag_ids=None,
                custom_field_values={},
                custom_field_values_set=False,
            )
            with pytest.raises(HTTPException) as exc:
                await _apply_lead_task_update(session, update=update)
            # Expect a 409 with a structured detail naming the blockers.
            assert exc.value.status_code == 409
            detail = exc.value.detail
            assert isinstance(detail, dict)
            assert detail["code"] == "task_blocked_cannot_transition"
            assert detail["blocked_by_task_ids"] == [str(dep_id)]
            # DB unchanged
            reloaded = (await session.exec(select(Task).where(col(Task.id) == task_id))).first()
            assert reloaded is not None
            assert reloaded.status == "review"
            assert reloaded.assigned_agent_id is None
    finally:
        await engine.dispose()
@pytest.mark.asyncio
async def test_lead_update_rejects_status_change_when_task_blocked() -> None:
    """A lead may not transition a task's status while a dependency blocks it."""
    engine = await _make_engine()
    try:
        async with await _make_session(engine) as session:
            org_id = uuid4()
            board_id = uuid4()
            lead_id = uuid4()
            dep_id = uuid4()
            task_id = uuid4()
            # Seed: org/board, lead agent, a blocking dependency task, the
            # task under test (in review), and the dependency edge.
            session.add(Organization(id=org_id, name="org"))
            session.add(Board(id=board_id, organization_id=org_id, name="b", slug="b"))
            session.add(
                Agent(
                    id=lead_id,
                    name="Lead",
                    board_id=board_id,
                    gateway_id=uuid4(),
                    is_board_lead=True,
                    openclaw_session_id="agent:lead:session",
                ),
            )
            session.add(Task(id=dep_id, board_id=board_id, title="dep", description=None))
            session.add(
                Task(
                    id=task_id,
                    board_id=board_id,
                    title="t",
                    description=None,
                    status="review",
                ),
            )
            session.add(
                TaskDependency(
                    board_id=board_id,
                    task_id=task_id,
                    depends_on_task_id=dep_id,
                ),
            )
            await session.commit()
            lead = (await session.exec(select(Agent).where(col(Agent.id) == lead_id))).first()
            task = (await session.exec(select(Task).where(col(Task.id) == task_id))).first()
            assert lead is not None
            assert task is not None
            # Attempt a status transition to done while blocked.
            update = _TaskUpdateInput(
                task=task,
                actor=ActorContext(actor_type="agent", agent=lead),
                board_id=board_id,
                previous_status=task.status,
                previous_assigned=task.assigned_agent_id,
                status_requested=True,
                updates={"status": "done"},
                comment=None,
                depends_on_task_ids=None,
                tag_ids=None,
                custom_field_values={},
                custom_field_values_set=False,
            )
            with pytest.raises(HTTPException) as exc:
                await _apply_lead_task_update(session, update=update)
            # Expect a 409 with a structured detail naming the blockers.
            assert exc.value.status_code == 409
            detail = exc.value.detail
            assert isinstance(detail, dict)
            assert detail["code"] == "task_blocked_cannot_transition"
            assert detail["blocked_by_task_ids"] == [str(dep_id)]
            # Status must remain untouched (no silent reset).
            reloaded = (await session.exec(select(Task).where(col(Task.id) == task_id))).first()
            assert reloaded is not None
            assert reloaded.status == "review"
    finally:
        await engine.dispose()

View File

@@ -0,0 +1,201 @@
# ruff: noqa: INP001
"""Webhook queue and dispatch worker tests."""
from __future__ import annotations
from datetime import UTC, datetime
from uuid import UUID, uuid4
import pytest
from app.services.webhooks import dispatch
from app.services.webhooks.queue import (
QueuedWebhookDelivery,
dequeue_webhook_delivery,
enqueue_webhook_delivery,
requeue_if_failed,
)
class _FakeRedis:
def __init__(self) -> None:
self.values: list[str] = []
def lpush(self, key: str, value: str) -> None:
self.values.insert(0, value)
def rpop(self, key: str) -> str | None:
if not self.values:
return None
return self.values.pop()
@pytest.mark.parametrize("attempts", [0, 1, 2])
def test_webhook_queue_roundtrip(monkeypatch: pytest.MonkeyPatch, attempts: int) -> None:
    """Enqueue then dequeue yields an equivalent QueuedWebhookDelivery."""
    fake = _FakeRedis()
    monkeypatch.setattr("app.services.webhooks.queue._redis_client", lambda: fake)
    original = QueuedWebhookDelivery(
        board_id=uuid4(),
        webhook_id=uuid4(),
        payload_id=uuid4(),
        payload_event="push",
        received_at=datetime.now(UTC),
        attempts=attempts,
    )
    assert enqueue_webhook_delivery(original)
    roundtripped = dequeue_webhook_delivery()
    assert roundtripped is not None
    assert roundtripped.board_id == original.board_id
    assert roundtripped.webhook_id == original.webhook_id
    assert roundtripped.payload_id == original.payload_id
    assert roundtripped.payload_event == "push"
    assert roundtripped.attempts == attempts
@pytest.mark.parametrize("attempts", [0, 1, 2, 3])
def test_requeue_respects_retry_cap(monkeypatch: pytest.MonkeyPatch, attempts: int) -> None:
    """Deliveries are requeued with attempts+1 until the configured cap is hit."""
    from app.services.webhooks import queue as webhook_queue

    fake = _FakeRedis()

    def _fake_redis() -> _FakeRedis:
        return fake

    monkeypatch.setattr("app.services.webhooks.queue._redis_client", _fake_redis)
    # Pin the cap explicitly: the test previously assumed the default value
    # of settings.webhook_dispatch_max_retries was 3 and would silently test
    # the wrong boundary if that default ever changed.
    monkeypatch.setattr(webhook_queue.settings, "webhook_dispatch_max_retries", 3)
    payload = QueuedWebhookDelivery(
        board_id=uuid4(),
        webhook_id=uuid4(),
        payload_id=uuid4(),
        payload_event="push",
        received_at=datetime.now(UTC),
        attempts=attempts,
    )
    if attempts >= 3:
        # At the cap: the delivery is dropped, nothing re-enters the queue.
        assert requeue_if_failed(payload) is False
        assert fake.values == []
    else:
        assert requeue_if_failed(payload) is True
        requeued = dequeue_webhook_delivery()
        assert requeued is not None
        assert requeued.attempts == attempts + 1
class _FakeQueuedItem:
def __init__(self, attempts: int = 0) -> None:
self.payload_id = uuid4()
self.webhook_id = uuid4()
self.board_id = uuid4()
self.attempts = attempts
def _patch_dequeue(monkeypatch: pytest.MonkeyPatch, items: list[QueuedWebhookDelivery | None]) -> None:
    """Replace dispatch.dequeue_webhook_delivery with a scripted item feed."""

    def _scripted_dequeue() -> QueuedWebhookDelivery | None:
        # Serve items in order; an exhausted list means the queue is drained.
        return items.pop(0) if items else None

    monkeypatch.setattr(dispatch, "dequeue_webhook_delivery", _scripted_dequeue)
@pytest.mark.asyncio
async def test_dispatch_flush_processes_items_and_throttles(monkeypatch: pytest.MonkeyPatch) -> None:
    """Every queued item is processed and a throttle sleep follows each one."""
    queued: list[QueuedWebhookDelivery | None] = [
        _FakeQueuedItem(),
        _FakeQueuedItem(),
        None,
    ]
    _patch_dequeue(monkeypatch, queued)
    handled: list[UUID] = []
    sleeps: list[float] = []

    async def _record(item: QueuedWebhookDelivery) -> None:
        handled.append(item.payload_id)

    monkeypatch.setattr(dispatch, "_process_single_item", _record)
    monkeypatch.setattr(dispatch.settings, "webhook_dispatch_throttle_seconds", 0)
    monkeypatch.setattr(dispatch.time, "sleep", lambda seconds: sleeps.append(seconds))
    await dispatch.flush_webhook_delivery_queue()
    assert len(handled) == 2
    assert sleeps == [0.0, 0.0]
@pytest.mark.asyncio
async def test_dispatch_flush_requeues_on_process_error(monkeypatch: pytest.MonkeyPatch) -> None:
    """A processing failure routes the item through requeue_if_failed."""
    failing_item = _FakeQueuedItem()
    _patch_dequeue(monkeypatch, [failing_item, None])
    requeued: list[QueuedWebhookDelivery] = []

    async def _always_fail(_: QueuedWebhookDelivery) -> None:
        raise RuntimeError("boom")

    def _capture_requeue(payload: QueuedWebhookDelivery) -> bool:
        requeued.append(payload)
        return True

    monkeypatch.setattr(dispatch, "_process_single_item", _always_fail)
    monkeypatch.setattr(dispatch, "requeue_if_failed", _capture_requeue)
    monkeypatch.setattr(dispatch.settings, "webhook_dispatch_throttle_seconds", 0)
    monkeypatch.setattr(dispatch.time, "sleep", lambda seconds: None)
    await dispatch.flush_webhook_delivery_queue()
    assert len(requeued) == 1
    assert requeued[0].payload_id == failing_item.payload_id
@pytest.mark.asyncio
async def test_dispatch_flush_recovers_from_dequeue_error(monkeypatch: pytest.MonkeyPatch) -> None:
    """One dequeue failure is logged and the loop keeps draining the queue."""
    item = _FakeQueuedItem()
    calls = 0

    def _flaky_dequeue() -> QueuedWebhookDelivery | None:
        nonlocal calls
        calls += 1
        if calls == 1:
            raise RuntimeError("dequeue broken")
        return item if calls == 2 else None

    monkeypatch.setattr(dispatch, "dequeue_webhook_delivery", _flaky_dequeue)
    handled = 0

    async def _count(_: QueuedWebhookDelivery) -> None:
        nonlocal handled
        handled += 1

    monkeypatch.setattr(dispatch, "_process_single_item", _count)
    monkeypatch.setattr(dispatch.settings, "webhook_dispatch_throttle_seconds", 0)
    monkeypatch.setattr(dispatch.time, "sleep", lambda seconds: None)
    await dispatch.flush_webhook_delivery_queue()
    # First call raised, second returned the item, third ended the loop.
    assert calls == 3
    assert handled == 1
def test_dispatch_run_entrypoint_calls_async_flush(monkeypatch: pytest.MonkeyPatch) -> None:
    """The sync RQ entrypoint drives the flush coroutine via asyncio.run."""
    invocations: list[bool] = []

    async def _fake_flush() -> None:
        invocations.append(True)

    monkeypatch.setattr(dispatch, "flush_webhook_delivery_queue", _fake_flush)
    dispatch.run_flush_webhook_delivery_queue()
    assert invocations == [True]

View File

@@ -0,0 +1,71 @@
# ruff: noqa: INP001
from __future__ import annotations
import pytest
from app.services.webhooks import dispatch
@pytest.mark.parametrize(
    ("event_name", "payload", "should_suppress"),
    [
        # check_run: expected routine for success, missing conclusion, and still-running.
        ("check_run", {"action": "completed", "check_run": {"status": "completed", "conclusion": "success"}}, True),
        ("check_run", {"action": "completed", "check_run": {"status": "completed", "conclusion": None}}, True),
        ("check_run", {"action": "created", "check_run": {"status": "queued"}}, True),
        ("check_run", {"action": "completed", "check_run": {"status": "completed", "conclusion": "failure"}}, False),
        # workflow_run: success is routine, cancelled is not.
        ("workflow_run", {"action": "completed", "workflow_run": {"status": "completed", "conclusion": "success"}}, True),
        ("workflow_run", {"action": "completed", "workflow_run": {"status": "completed", "conclusion": "cancelled"}}, False),
        # check_suite: neutral counts as routine, timed_out does not.
        ("check_suite", {"action": "completed", "check_suite": {"status": "completed", "conclusion": "timed_out"}}, False),
        ("check_suite", {"action": "completed", "check_suite": {"status": "completed", "conclusion": "neutral"}}, True),
        # Non-target events should not be suppressed by this helper.
        ("pull_request", {"action": "opened"}, False),
        (None, {"action": "opened"}, False),
        # Non-dict payloads: don't suppress (we can't reason about it).
        ("check_run", "raw", False),
    ],
)
def test_should_suppress_routine_delivery(
    monkeypatch: pytest.MonkeyPatch,
    event_name: str | None,
    payload: object,
    should_suppress: bool,
) -> None:
    """With suppression enabled, the helper's verdict matches each expectation."""
    monkeypatch.setattr(dispatch.settings, "webhook_dispatch_suppress_routine_events", True)
    verdict = dispatch._should_suppress_routine_delivery(
        payload_event=event_name,
        payload_value=payload,
    )
    assert verdict is should_suppress
def test_suppression_disabled_via_settings(monkeypatch: pytest.MonkeyPatch) -> None:
    """With the flag off, even a routine successful check_run is not suppressed."""
    monkeypatch.setattr(dispatch.settings, "webhook_dispatch_suppress_routine_events", False)
    payload = {
        "action": "completed",
        "check_run": {"status": "completed", "conclusion": "success"},
    }
    suppressed = dispatch._should_suppress_routine_delivery(
        payload_event="check_run",
        payload_value=payload,
    )
    assert suppressed is False

105
backend/uv.lock generated
View File

@@ -279,6 +279,25 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" },
]
[[package]]
name = "croniter"
version = "6.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "python-dateutil" },
{ name = "pytz" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481, upload-time = "2024-12-17T17:17:47.32Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, upload-time = "2024-12-17T17:17:45.359Z" },
]
[[package]]
name = "crontab"
version = "1.0.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d6/36/a255b6f5a2e22df03fd2b2f3088974b44b8c9e9407e26b44742cb7cfbf5b/crontab-1.0.5.tar.gz", hash = "sha256:f80e01b4f07219763a9869f926dd17147278e7965a928089bca6d3dc80ae46d5", size = 21963, upload-time = "2025-07-09T17:09:38.264Z" }
[[package]]
name = "cryptography"
version = "45.0.7"
@@ -358,6 +377,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/9f/56/13ab06b4f93ca7cac71078fbe37fcea175d3216f31f85c3168a6bbd0bb9a/flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e", size = 57922, upload-time = "2025-06-20T19:31:34.425Z" },
]
[[package]]
name = "freezegun"
version = "1.5.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "python-dateutil" },
]
sdist = { url = "https://files.pythonhosted.org/packages/95/dd/23e2f4e357f8fd3bdff613c1fe4466d21bfb00a6177f238079b17f7b1c84/freezegun-1.5.5.tar.gz", hash = "sha256:ac7742a6cc6c25a2c35e9292dfd554b897b517d2dec26891a2e8debf205cb94a", size = 35914, upload-time = "2025-08-09T10:39:08.338Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5e/2e/b41d8a1a917d6581fc27a35d05561037b048e47df50f27f8ac9c7e27a710/freezegun-1.5.5-py3-none-any.whl", hash = "sha256:cd557f4a75cf074e84bc374249b9dd491eaeacd61376b9eb3c423282211619d2", size = 19266, upload-time = "2025-08-09T10:39:06.636Z" },
]
[[package]]
name = "greenlet"
version = "3.3.1"
@@ -697,6 +728,9 @@ dependencies = [
{ name = "psycopg", extra = ["binary"] },
{ name = "pydantic-settings" },
{ name = "python-dotenv" },
{ name = "redis" },
{ name = "rq" },
{ name = "rq-scheduler" },
{ name = "sqlalchemy", extra = ["asyncio"] },
{ name = "sqlmodel" },
{ name = "sse-starlette" },
@@ -739,6 +773,9 @@ requires-dist = [
{ name = "pytest-asyncio", marker = "extra == 'dev'", specifier = "==1.3.0" },
{ name = "pytest-cov", marker = "extra == 'dev'", specifier = "==7.0.0" },
{ name = "python-dotenv", specifier = "==1.2.1" },
{ name = "redis", specifier = "==6.3.0" },
{ name = "rq", specifier = "==2.6.0" },
{ name = "rq-scheduler", specifier = "==0.14.0" },
{ name = "ruff", marker = "extra == 'dev'", specifier = "==0.15.0" },
{ name = "sqlalchemy", extras = ["asyncio"], specifier = "==2.0.46" },
{ name = "sqlmodel", specifier = "==0.0.32" },
@@ -1030,6 +1067,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
]
[[package]]
name = "python-dotenv"
version = "1.2.1"
@@ -1068,6 +1117,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/78/397db326746f0a342855b81216ae1f0a32965deccfd7c830a2dbc66d2483/pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de", size = 13729, upload-time = "2026-01-30T01:03:45.029Z" },
]
[[package]]
name = "pytz"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
]
[[package]]
name = "pyyaml"
version = "6.0.3"
@@ -1114,6 +1172,44 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
]
[[package]]
name = "redis"
version = "6.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/21/cd/030274634a1a052b708756016283ea3d84e91ae45f74d7f5dcf55d753a0f/redis-6.3.0.tar.gz", hash = "sha256:3000dbe532babfb0999cdab7b3e5744bcb23e51923febcfaeb52c8cfb29632ef", size = 4647275, upload-time = "2025-08-05T08:12:31.648Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/df/a7/2fe45801534a187543fc45d28b3844d84559c1589255bc2ece30d92dc205/redis-6.3.0-py3-none-any.whl", hash = "sha256:92f079d656ded871535e099080f70fab8e75273c0236797126ac60242d638e9b", size = 280018, upload-time = "2025-08-05T08:12:30.093Z" },
]
[[package]]
name = "rq"
version = "2.6.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "croniter" },
{ name = "redis" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8e/f5/46e39abc46ff6ff4f3151ee4fd2c1bf7601a8d26bd30fd951c5496b1e6c6/rq-2.6.0.tar.gz", hash = "sha256:92ad55676cda14512c4eea5782f398a102dc3af108bea197c868c4c50c5d3e81", size = 675315, upload-time = "2025-09-06T03:15:12.854Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/66/6cf141584526e3ed5b57a194e09cbdf7058334bd3926bb3f96e2453cf053/rq-2.6.0-py3-none-any.whl", hash = "sha256:be5ccc0f0fc5f32da0999648340e31476368f08067f0c3fce6768d00064edbb5", size = 112533, upload-time = "2025-09-06T03:15:09.894Z" },
]
[[package]]
name = "rq-scheduler"
version = "0.14.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "crontab" },
{ name = "freezegun" },
{ name = "python-dateutil" },
{ name = "rq" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a0/4e/977bbcc1f3b25ed9ea60ec968b13f7147661defe5b2f9272b44fdb1c5549/rq-scheduler-0.14.0.tar.gz", hash = "sha256:2d5a14a1ab217f8693184ebaa1fe03838edcbc70b4f76572721c0b33058cd023", size = 16582, upload-time = "2024-10-29T13:30:32.641Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bb/d0/28cedca9f3b321f30e69d644c2dcd7097ec21570ec9606fde56750621300/rq_scheduler-0.14.0-py2.py3-none-any.whl", hash = "sha256:d4ec221a3d8c11b3ff55e041f09d9af1e17f3253db737b6b97e86ab20fc3dc0d", size = 13874, upload-time = "2024-10-29T13:30:30.449Z" },
]
[[package]]
name = "ruff"
version = "0.15.0"
@@ -1139,6 +1235,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f6/b0/2d823f6e77ebe560f4e397d078487e8d52c1516b331e3521bc75db4272ca/ruff-0.15.0-py3-none-win_arm64.whl", hash = "sha256:c480d632cc0ca3f0727acac8b7d053542d9e114a462a145d0b00e7cd658c515a", size = 10865753, upload-time = "2026-02-03T17:53:03.014Z" },
]
[[package]]
name = "six"
version = "1.17.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]
[[package]]
name = "sqlalchemy"
version = "2.0.46"

View File

@@ -17,6 +17,11 @@ services:
timeout: 3s
retries: 20
redis:
image: redis:7-alpine
ports:
- "${REDIS_PORT:-6379}:6379"
backend:
build:
# Build from repo root so the backend image can include repo-level assets
@@ -32,9 +37,12 @@ services:
DB_AUTO_MIGRATE: ${DB_AUTO_MIGRATE:-true}
AUTH_MODE: ${AUTH_MODE}
LOCAL_AUTH_TOKEN: ${LOCAL_AUTH_TOKEN}
WEBHOOK_REDIS_URL: redis://redis:6379/0
depends_on:
db:
condition: service_healthy
redis:
condition: service_started
ports:
- "${BACKEND_PORT:-8000}:8000"
@@ -58,5 +66,46 @@ services:
ports:
- "${FRONTEND_PORT:-3000}:3000"
webhook-worker:
build:
context: .
dockerfile: backend/Dockerfile
command: ["rq", "worker", "webhook-dispatch", "-u", "redis://redis:6379/0"]
env_file:
- ./backend/.env.example
depends_on:
redis:
condition: service_started
db:
condition: service_healthy
environment:
DATABASE_URL: postgresql+psycopg://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-postgres}@db:5432/${POSTGRES_DB:-mission_control}
AUTH_MODE: ${AUTH_MODE}
LOCAL_AUTH_TOKEN: ${LOCAL_AUTH_TOKEN}
WEBHOOK_REDIS_URL: redis://redis:6379/0
WEBHOOK_QUEUE_NAME: webhook-dispatch
WEBHOOK_DISPATCH_SCHEDULE_ID: webhook-dispatch-batch
restart: unless-stopped
webhook-dispatch-cron:
build:
context: .
dockerfile: backend/Dockerfile
command:
- sh
- -c
- |
python -c "from app.services.webhooks.scheduler import bootstrap_webhook_dispatch_schedule; bootstrap_webhook_dispatch_schedule()" && \
rqscheduler -u redis://redis:6379/0 -i 60
depends_on:
- redis
- webhook-worker
environment:
WEBHOOK_REDIS_URL: redis://redis:6379/0
WEBHOOK_QUEUE_NAME: webhook-dispatch
WEBHOOK_DISPATCH_SCHEDULE_ID: webhook-dispatch-batch
WEBHOOK_DISPATCH_SCHEDULE_INTERVAL_SECONDS: 900
restart: unless-stopped
volumes:
postgres_data:

25
docs/installer-support.md Normal file
View File

@@ -0,0 +1,25 @@
# Installer platform support
This document defines current support status for `./install.sh`.
## Support states
- **Stable**: fully tested path in CI and expected to work end-to-end.
- **Scaffolded**: distro is detected and actionable install guidance is provided, but full automatic package installation is not implemented yet.
- **Unsupported**: distro/package manager is not detected by installer.
## Current matrix
| Distro family | Package manager | State | Notes |
|---|---|---|---|
| Debian / Ubuntu | `apt` | **Stable** | Full automatic dependency install path. |
| Fedora / RHEL / CentOS | `dnf` / `yum` | **Scaffolded** | Detection + actionable commands present; auto-install path is TODO. |
| openSUSE | `zypper` | **Scaffolded** | Detection + actionable commands present; auto-install path is TODO. |
| Arch Linux | `pacman` | **Scaffolded** | Detection + actionable commands present; auto-install path is TODO. |
| Other Linux distros | unknown | **Unsupported** | Installer exits with package-manager guidance requirement. |
## Guard rails
- Debian/Ubuntu behavior must remain stable for every portability PR.
- New distro support should be added behind explicit package-manager adapters and tests.
- If a distro is scaffolded but not fully automated, installer should fail fast with actionable manual commands (not generic errors).

9
frontend/.dockerignore Normal file
View File

@@ -0,0 +1,9 @@
node_modules
.next
coverage
cypress/screenshots
cypress/videos
npm-debug.log*
.env
.env.*
.git

File diff suppressed because it is too large Load Diff

View File

@@ -364,6 +364,134 @@ export const useCreateAgentApiV1AgentsPost = <
queryClient,
);
};
/**
* Heartbeat an existing agent or create/provision one if needed.
* @summary Heartbeat Or Create Agent
*/
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse200 = {
data: AgentRead;
status: 200;
};
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse422 = {
data: HTTPValidationError;
status: 422;
};
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponseSuccess =
heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse200 & {
headers: Headers;
};
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponseError =
heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse422 & {
headers: Headers;
};
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse =
| heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponseSuccess
| heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponseError;
export const getHeartbeatOrCreateAgentApiV1AgentsHeartbeatPostUrl = () => {
return `/api/v1/agents/heartbeat`;
};
export const heartbeatOrCreateAgentApiV1AgentsHeartbeatPost = async (
agentHeartbeatCreate: AgentHeartbeatCreate,
options?: RequestInit,
): Promise<heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse> => {
return customFetch<heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse>(
getHeartbeatOrCreateAgentApiV1AgentsHeartbeatPostUrl(),
{
...options,
method: "POST",
headers: { "Content-Type": "application/json", ...options?.headers },
body: JSON.stringify(agentHeartbeatCreate),
},
);
};
export const getHeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationOptions =
<TError = HTTPValidationError, TContext = unknown>(options?: {
mutation?: UseMutationOptions<
Awaited<
ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>
>,
TError,
{ data: AgentHeartbeatCreate },
TContext
>;
request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
Awaited<ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>>,
TError,
{ data: AgentHeartbeatCreate },
TContext
> => {
const mutationKey = ["heartbeatOrCreateAgentApiV1AgentsHeartbeatPost"];
const { mutation: mutationOptions, request: requestOptions } = options
? options.mutation &&
"mutationKey" in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey }, request: undefined };
const mutationFn: MutationFunction<
Awaited<
ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>
>,
{ data: AgentHeartbeatCreate }
> = (props) => {
const { data } = props ?? {};
return heartbeatOrCreateAgentApiV1AgentsHeartbeatPost(
data,
requestOptions,
);
};
return { mutationFn, ...mutationOptions };
};
export type HeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationResult =
NonNullable<
Awaited<ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>>
>;
export type HeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationBody =
AgentHeartbeatCreate;
export type HeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationError =
HTTPValidationError;
/**
* @summary Heartbeat Or Create Agent
*/
export const useHeartbeatOrCreateAgentApiV1AgentsHeartbeatPost = <
TError = HTTPValidationError,
TContext = unknown,
>(
options?: {
mutation?: UseMutationOptions<
Awaited<
ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>
>,
TError,
{ data: AgentHeartbeatCreate },
TContext
>;
request?: SecondParameter<typeof customFetch>;
},
queryClient?: QueryClient,
): UseMutationResult<
Awaited<ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>>,
TError,
{ data: AgentHeartbeatCreate },
TContext
> => {
return useMutation(
getHeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationOptions(options),
queryClient,
);
};
/**
* Stream agent updates as SSE events.
* @summary Stream Agents
@@ -576,6 +704,123 @@ export function useStreamAgentsApiV1AgentsStreamGet<
return { ...query, queryKey: queryOptions.queryKey };
}
/**
* Delete an agent and clean related task state.
* @summary Delete Agent
*/
export type deleteAgentApiV1AgentsAgentIdDeleteResponse200 = {
data: OkResponse;
status: 200;
};
export type deleteAgentApiV1AgentsAgentIdDeleteResponse422 = {
data: HTTPValidationError;
status: 422;
};
export type deleteAgentApiV1AgentsAgentIdDeleteResponseSuccess =
deleteAgentApiV1AgentsAgentIdDeleteResponse200 & {
headers: Headers;
};
export type deleteAgentApiV1AgentsAgentIdDeleteResponseError =
deleteAgentApiV1AgentsAgentIdDeleteResponse422 & {
headers: Headers;
};
export type deleteAgentApiV1AgentsAgentIdDeleteResponse =
| deleteAgentApiV1AgentsAgentIdDeleteResponseSuccess
| deleteAgentApiV1AgentsAgentIdDeleteResponseError;
export const getDeleteAgentApiV1AgentsAgentIdDeleteUrl = (agentId: string) => {
return `/api/v1/agents/${agentId}`;
};
export const deleteAgentApiV1AgentsAgentIdDelete = async (
agentId: string,
options?: RequestInit,
): Promise<deleteAgentApiV1AgentsAgentIdDeleteResponse> => {
return customFetch<deleteAgentApiV1AgentsAgentIdDeleteResponse>(
getDeleteAgentApiV1AgentsAgentIdDeleteUrl(agentId),
{
...options,
method: "DELETE",
},
);
};
export const getDeleteAgentApiV1AgentsAgentIdDeleteMutationOptions = <
TError = HTTPValidationError,
TContext = unknown,
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
TError,
{ agentId: string },
TContext
>;
request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
TError,
{ agentId: string },
TContext
> => {
const mutationKey = ["deleteAgentApiV1AgentsAgentIdDelete"];
const { mutation: mutationOptions, request: requestOptions } = options
? options.mutation &&
"mutationKey" in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey }, request: undefined };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
{ agentId: string }
> = (props) => {
const { agentId } = props ?? {};
return deleteAgentApiV1AgentsAgentIdDelete(agentId, requestOptions);
};
return { mutationFn, ...mutationOptions };
};
export type DeleteAgentApiV1AgentsAgentIdDeleteMutationResult = NonNullable<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>
>;
export type DeleteAgentApiV1AgentsAgentIdDeleteMutationError =
HTTPValidationError;
/**
* @summary Delete Agent
*/
export const useDeleteAgentApiV1AgentsAgentIdDelete = <
TError = HTTPValidationError,
TContext = unknown,
>(
options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
TError,
{ agentId: string },
TContext
>;
request?: SecondParameter<typeof customFetch>;
},
queryClient?: QueryClient,
): UseMutationResult<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
TError,
{ agentId: string },
TContext
> => {
return useMutation(
getDeleteAgentApiV1AgentsAgentIdDeleteMutationOptions(options),
queryClient,
);
};
/**
* Get a single agent by id.
* @summary Get Agent
@@ -937,123 +1182,6 @@ export const useUpdateAgentApiV1AgentsAgentIdPatch = <
queryClient,
);
};
/**
* Delete an agent and clean related task state.
* @summary Delete Agent
*/
export type deleteAgentApiV1AgentsAgentIdDeleteResponse200 = {
data: OkResponse;
status: 200;
};
export type deleteAgentApiV1AgentsAgentIdDeleteResponse422 = {
data: HTTPValidationError;
status: 422;
};
export type deleteAgentApiV1AgentsAgentIdDeleteResponseSuccess =
deleteAgentApiV1AgentsAgentIdDeleteResponse200 & {
headers: Headers;
};
export type deleteAgentApiV1AgentsAgentIdDeleteResponseError =
deleteAgentApiV1AgentsAgentIdDeleteResponse422 & {
headers: Headers;
};
export type deleteAgentApiV1AgentsAgentIdDeleteResponse =
| deleteAgentApiV1AgentsAgentIdDeleteResponseSuccess
| deleteAgentApiV1AgentsAgentIdDeleteResponseError;
export const getDeleteAgentApiV1AgentsAgentIdDeleteUrl = (agentId: string) => {
return `/api/v1/agents/${agentId}`;
};
export const deleteAgentApiV1AgentsAgentIdDelete = async (
agentId: string,
options?: RequestInit,
): Promise<deleteAgentApiV1AgentsAgentIdDeleteResponse> => {
return customFetch<deleteAgentApiV1AgentsAgentIdDeleteResponse>(
getDeleteAgentApiV1AgentsAgentIdDeleteUrl(agentId),
{
...options,
method: "DELETE",
},
);
};
export const getDeleteAgentApiV1AgentsAgentIdDeleteMutationOptions = <
TError = HTTPValidationError,
TContext = unknown,
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
TError,
{ agentId: string },
TContext
>;
request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
TError,
{ agentId: string },
TContext
> => {
const mutationKey = ["deleteAgentApiV1AgentsAgentIdDelete"];
const { mutation: mutationOptions, request: requestOptions } = options
? options.mutation &&
"mutationKey" in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey }, request: undefined };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
{ agentId: string }
> = (props) => {
const { agentId } = props ?? {};
return deleteAgentApiV1AgentsAgentIdDelete(agentId, requestOptions);
};
return { mutationFn, ...mutationOptions };
};
export type DeleteAgentApiV1AgentsAgentIdDeleteMutationResult = NonNullable<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>
>;
export type DeleteAgentApiV1AgentsAgentIdDeleteMutationError =
HTTPValidationError;
/**
* @summary Delete Agent
*/
export const useDeleteAgentApiV1AgentsAgentIdDelete = <
TError = HTTPValidationError,
TContext = unknown,
>(
options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
TError,
{ agentId: string },
TContext
>;
request?: SecondParameter<typeof customFetch>;
},
queryClient?: QueryClient,
): UseMutationResult<
Awaited<ReturnType<typeof deleteAgentApiV1AgentsAgentIdDelete>>,
TError,
{ agentId: string },
TContext
> => {
return useMutation(
getDeleteAgentApiV1AgentsAgentIdDeleteMutationOptions(options),
queryClient,
);
};
/**
* Record a heartbeat for a specific agent.
* @summary Heartbeat Agent
@@ -1182,131 +1310,3 @@ export const useHeartbeatAgentApiV1AgentsAgentIdHeartbeatPost = <
queryClient,
);
};
/**
* Heartbeat an existing agent or create/provision one if needed.
* @summary Heartbeat Or Create Agent
*/
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse200 = {
data: AgentRead;
status: 200;
};
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse422 = {
data: HTTPValidationError;
status: 422;
};
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponseSuccess =
heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse200 & {
headers: Headers;
};
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponseError =
heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse422 & {
headers: Headers;
};
export type heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse =
| heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponseSuccess
| heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponseError;
export const getHeartbeatOrCreateAgentApiV1AgentsHeartbeatPostUrl = () => {
return `/api/v1/agents/heartbeat`;
};
export const heartbeatOrCreateAgentApiV1AgentsHeartbeatPost = async (
agentHeartbeatCreate: AgentHeartbeatCreate,
options?: RequestInit,
): Promise<heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse> => {
return customFetch<heartbeatOrCreateAgentApiV1AgentsHeartbeatPostResponse>(
getHeartbeatOrCreateAgentApiV1AgentsHeartbeatPostUrl(),
{
...options,
method: "POST",
headers: { "Content-Type": "application/json", ...options?.headers },
body: JSON.stringify(agentHeartbeatCreate),
},
);
};
export const getHeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationOptions =
<TError = HTTPValidationError, TContext = unknown>(options?: {
mutation?: UseMutationOptions<
Awaited<
ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>
>,
TError,
{ data: AgentHeartbeatCreate },
TContext
>;
request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
Awaited<ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>>,
TError,
{ data: AgentHeartbeatCreate },
TContext
> => {
const mutationKey = ["heartbeatOrCreateAgentApiV1AgentsHeartbeatPost"];
const { mutation: mutationOptions, request: requestOptions } = options
? options.mutation &&
"mutationKey" in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey }, request: undefined };
const mutationFn: MutationFunction<
Awaited<
ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>
>,
{ data: AgentHeartbeatCreate }
> = (props) => {
const { data } = props ?? {};
return heartbeatOrCreateAgentApiV1AgentsHeartbeatPost(
data,
requestOptions,
);
};
return { mutationFn, ...mutationOptions };
};
export type HeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationResult =
NonNullable<
Awaited<ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>>
>;
export type HeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationBody =
AgentHeartbeatCreate;
export type HeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationError =
HTTPValidationError;
/**
* @summary Heartbeat Or Create Agent
*/
export const useHeartbeatOrCreateAgentApiV1AgentsHeartbeatPost = <
TError = HTTPValidationError,
TContext = unknown,
>(
options?: {
mutation?: UseMutationOptions<
Awaited<
ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>
>,
TError,
{ data: AgentHeartbeatCreate },
TContext
>;
request?: SecondParameter<typeof customFetch>;
},
queryClient?: QueryClient,
): UseMutationResult<
Awaited<ReturnType<typeof heartbeatOrCreateAgentApiV1AgentsHeartbeatPost>>,
TError,
{ data: AgentHeartbeatCreate },
TContext
> => {
return useMutation(
getHeartbeatOrCreateAgentApiV1AgentsHeartbeatPostMutationOptions(options),
queryClient,
);
};

View File

@@ -801,7 +801,9 @@ export function useStreamBoardGroupMemoryApiV1BoardGroupsGroupIdMemoryStreamGet<
}
/**
* List memory entries for the board's linked group.
* List shared memory for the board's linked group.
Use this for cross-board context and coordination signals.
* @summary List Board Group Memory For Board
*/
export type listBoardGroupMemoryForBoardApiV1BoardsBoardIdGroupMemoryGetResponse200 =
@@ -1123,7 +1125,10 @@ export function useListBoardGroupMemoryForBoardApiV1BoardsBoardIdGroupMemoryGet<
}
/**
* Create a group memory entry from a board context and notify recipients.
* Create shared group memory from a board context.
When tags/mentions indicate chat or broadcast intent, eligible agents in the
linked group are notified.
* @summary Create Board Group Memory For Board
*/
export type createBoardGroupMemoryForBoardApiV1BoardsBoardIdGroupMemoryPostResponse200 =
@@ -1280,7 +1285,7 @@ export const useCreateBoardGroupMemoryForBoardApiV1BoardsBoardIdGroupMemoryPost
);
};
/**
* Stream memory entries for the board's linked group.
* Stream linked-group memory via SSE for near-real-time coordination.
* @summary Stream Board Group Memory For Board
*/
export type streamBoardGroupMemoryForBoardApiV1BoardsBoardIdGroupMemoryStreamGetResponse200 =

View File

@@ -369,6 +369,129 @@ export const useCreateBoardGroupApiV1BoardGroupsPost = <
queryClient,
);
};
/**
* Delete a board group.
* @summary Delete Board Group
*/
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse200 = {
data: OkResponse;
status: 200;
};
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse422 = {
data: HTTPValidationError;
status: 422;
};
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponseSuccess =
deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse200 & {
headers: Headers;
};
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponseError =
deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse422 & {
headers: Headers;
};
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse =
| deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponseSuccess
| deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponseError;
export const getDeleteBoardGroupApiV1BoardGroupsGroupIdDeleteUrl = (
groupId: string,
) => {
return `/api/v1/board-groups/${groupId}`;
};
export const deleteBoardGroupApiV1BoardGroupsGroupIdDelete = async (
groupId: string,
options?: RequestInit,
): Promise<deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse> => {
return customFetch<deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse>(
getDeleteBoardGroupApiV1BoardGroupsGroupIdDeleteUrl(groupId),
{
...options,
method: "DELETE",
},
);
};
/**
 * Build react-query mutation options for the board-group DELETE call.
 * A stable default `mutationKey` is injected unless the caller supplies
 * one; `options.request` is threaded through to the fetcher.
 */
export const getDeleteBoardGroupApiV1BoardGroupsGroupIdDeleteMutationOptions = <
  TError = HTTPValidationError,
  TContext = unknown,
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
    TError,
    { groupId: string },
    TContext
  >;
  request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
  Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
  TError,
  { groupId: string },
  TContext
> => {
  const mutationKey = ["deleteBoardGroupApiV1BoardGroupsGroupIdDelete"];
  // Reuse caller options verbatim when they already pin a mutationKey;
  // otherwise merge in the default key (or synthesize options entirely).
  let resolved;
  if (!options) {
    resolved = { mutation: { mutationKey }, request: undefined };
  } else if (
    options.mutation &&
    "mutationKey" in options.mutation &&
    options.mutation.mutationKey
  ) {
    resolved = options;
  } else {
    resolved = { ...options, mutation: { ...options.mutation, mutationKey } };
  }
  const { mutation: mutationOptions, request: requestOptions } = resolved;
  // Adapt react-query's single-variables call shape to the positional fetcher.
  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
    { groupId: string }
  > = (props) => {
    const { groupId } = props ?? {};
    return deleteBoardGroupApiV1BoardGroupsGroupIdDelete(
      groupId,
      requestOptions,
    );
  };
  return { mutationFn, ...mutationOptions };
};
// Convenience aliases for consumers typing onSuccess/onError handlers.
export type DeleteBoardGroupApiV1BoardGroupsGroupIdDeleteMutationResult =
  NonNullable<
    Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>
  >;
export type DeleteBoardGroupApiV1BoardGroupsGroupIdDeleteMutationError =
  HTTPValidationError;
/**
 * React-query mutation hook for deleting a board group.
 * @summary Delete Board Group
 */
export const useDeleteBoardGroupApiV1BoardGroupsGroupIdDelete = <
  TError = HTTPValidationError,
  TContext = unknown,
>(
  options?: {
    mutation?: UseMutationOptions<
      Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
      TError,
      { groupId: string },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseMutationResult<
  Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
  TError,
  { groupId: string },
  TContext
> => {
  return useMutation(
    getDeleteBoardGroupApiV1BoardGroupsGroupIdDeleteMutationOptions(options),
    queryClient,
  );
};
/**
* Get a board group by id.
* @summary Get Board Group
@@ -707,125 +830,161 @@ export const useUpdateBoardGroupApiV1BoardGroupsGroupIdPatch = <
);
};
/**
* Delete a board group.
* @summary Delete Board Group
* Apply heartbeat settings to agents in a board group.
* @summary Apply Board Group Heartbeat
*/
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse200 = {
data: OkResponse;
status: 200;
};
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse200 =
{
data: BoardGroupHeartbeatApplyResult;
status: 200;
};
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse422 = {
data: HTTPValidationError;
status: 422;
};
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse422 =
{
data: HTTPValidationError;
status: 422;
};
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponseSuccess =
deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse200 & {
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponseSuccess =
applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse200 & {
headers: Headers;
};
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponseError =
deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse422 & {
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponseError =
applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse422 & {
headers: Headers;
};
export type deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse =
| deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponseSuccess
| deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponseError;
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse =
export const getDeleteBoardGroupApiV1BoardGroupsGroupIdDeleteUrl = (
groupId: string,
) => {
return `/api/v1/board-groups/${groupId}`;
};
| applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponseSuccess
| applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponseError;
export const deleteBoardGroupApiV1BoardGroupsGroupIdDelete = async (
groupId: string,
options?: RequestInit,
): Promise<deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse> => {
return customFetch<deleteBoardGroupApiV1BoardGroupsGroupIdDeleteResponse>(
getDeleteBoardGroupApiV1BoardGroupsGroupIdDeleteUrl(groupId),
{
...options,
method: "DELETE",
},
);
};
export const getApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostUrl =
(groupId: string) => {
return `/api/v1/board-groups/${groupId}/heartbeat`;
};
export const getDeleteBoardGroupApiV1BoardGroupsGroupIdDeleteMutationOptions = <
TError = HTTPValidationError,
TContext = unknown,
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
TError,
{ groupId: string },
TContext
>;
request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
TError,
{ groupId: string },
TContext
> => {
const mutationKey = ["deleteBoardGroupApiV1BoardGroupsGroupIdDelete"];
const { mutation: mutationOptions, request: requestOptions } = options
? options.mutation &&
"mutationKey" in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey }, request: undefined };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
{ groupId: string }
> = (props) => {
const { groupId } = props ?? {};
return deleteBoardGroupApiV1BoardGroupsGroupIdDelete(
groupId,
requestOptions,
export const applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost =
async (
groupId: string,
boardGroupHeartbeatApply: BoardGroupHeartbeatApply,
options?: RequestInit,
): Promise<applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse> => {
return customFetch<applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse>(
getApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostUrl(
groupId,
),
{
...options,
method: "POST",
headers: { "Content-Type": "application/json", ...options?.headers },
body: JSON.stringify(boardGroupHeartbeatApply),
},
);
};
return { mutationFn, ...mutationOptions };
};
export const getApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationOptions =
<TError = HTTPValidationError, TContext = unknown>(options?: {
mutation?: UseMutationOptions<
Awaited<
ReturnType<
typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
>
>,
TError,
{ groupId: string; data: BoardGroupHeartbeatApply },
TContext
>;
request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
Awaited<
ReturnType<
typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
>
>,
TError,
{ groupId: string; data: BoardGroupHeartbeatApply },
TContext
> => {
const mutationKey = [
"applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost",
];
const { mutation: mutationOptions, request: requestOptions } = options
? options.mutation &&
"mutationKey" in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey }, request: undefined };
export type DeleteBoardGroupApiV1BoardGroupsGroupIdDeleteMutationResult =
const mutationFn: MutationFunction<
Awaited<
ReturnType<
typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
>
>,
{ groupId: string; data: BoardGroupHeartbeatApply }
> = (props) => {
const { groupId, data } = props ?? {};
return applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost(
groupId,
data,
requestOptions,
);
};
return { mutationFn, ...mutationOptions };
};
export type ApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationResult =
NonNullable<
Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>
Awaited<
ReturnType<
typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
>
>
>;
export type DeleteBoardGroupApiV1BoardGroupsGroupIdDeleteMutationError =
export type ApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationBody =
BoardGroupHeartbeatApply;
export type ApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationError =
HTTPValidationError;
/**
* @summary Delete Board Group
* @summary Apply Board Group Heartbeat
*/
export const useDeleteBoardGroupApiV1BoardGroupsGroupIdDelete = <
export const useApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost = <
TError = HTTPValidationError,
TContext = unknown,
>(
options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
Awaited<
ReturnType<
typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
>
>,
TError,
{ groupId: string },
{ groupId: string; data: BoardGroupHeartbeatApply },
TContext
>;
request?: SecondParameter<typeof customFetch>;
},
queryClient?: QueryClient,
): UseMutationResult<
Awaited<ReturnType<typeof deleteBoardGroupApiV1BoardGroupsGroupIdDelete>>,
Awaited<
ReturnType<
typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
>
>,
TError,
{ groupId: string },
{ groupId: string; data: BoardGroupHeartbeatApply },
TContext
> => {
return useMutation(
getDeleteBoardGroupApiV1BoardGroupsGroupIdDeleteMutationOptions(options),
getApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationOptions(
options,
),
queryClient,
);
};
@@ -1131,163 +1290,3 @@ export function useGetBoardGroupSnapshotApiV1BoardGroupsGroupIdSnapshotGet<
return { ...query, queryKey: queryOptions.queryKey };
}
/**
 * Apply heartbeat settings to agents in a board group.
 * @summary Apply Board Group Heartbeat
 */
// 200: apply-result payload for the group.
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse200 =
  {
    data: BoardGroupHeartbeatApplyResult;
    status: 200;
  };
// 422: request validation error payload.
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse422 =
  {
    data: HTTPValidationError;
    status: 422;
  };
// Success/error variants additionally expose the raw response Headers.
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponseSuccess =
  applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse200 & {
    headers: Headers;
  };
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponseError =
  applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse422 & {
    headers: Headers;
  };
// Overall response union, discriminated by the `status` literal.
export type applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse =
  | applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponseSuccess
  | applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponseError;
/** Build the heartbeat-apply endpoint path for a board group. */
export const getApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostUrl =
  (groupId: string) => `/api/v1/board-groups/${groupId}/heartbeat`;
/**
 * POST heartbeat settings for the agents of a board group.
 * The payload is JSON-encoded; Content-Type is set before spreading the
 * caller's headers, so a caller-provided Content-Type wins.
 */
export const applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost =
  async (
    groupId: string,
    boardGroupHeartbeatApply: BoardGroupHeartbeatApply,
    options?: RequestInit,
  ): Promise<applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse> => {
    const requestInit: RequestInit = {
      ...options,
      method: "POST",
      headers: { "Content-Type": "application/json", ...options?.headers },
      body: JSON.stringify(boardGroupHeartbeatApply),
    };
    const url =
      getApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostUrl(
        groupId,
      );
    return customFetch<applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostResponse>(
      url,
      requestInit,
    );
  };
/**
 * Build react-query mutation options for the heartbeat-apply POST.
 * Injects a stable default `mutationKey` unless the caller pins one and
 * forwards `options.request` to the fetcher.
 */
export const getApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationOptions =
  <TError = HTTPValidationError, TContext = unknown>(options?: {
    mutation?: UseMutationOptions<
      Awaited<
        ReturnType<
          typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
        >
      >,
      TError,
      { groupId: string; data: BoardGroupHeartbeatApply },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  }): UseMutationOptions<
    Awaited<
      ReturnType<
        typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
      >
    >,
    TError,
    { groupId: string; data: BoardGroupHeartbeatApply },
    TContext
  > => {
    const mutationKey = [
      "applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost",
    ];
    // Reuse caller options verbatim when they already pin a mutationKey;
    // otherwise merge in the default key (or synthesize options entirely).
    let resolved;
    if (!options) {
      resolved = { mutation: { mutationKey }, request: undefined };
    } else if (
      options.mutation &&
      "mutationKey" in options.mutation &&
      options.mutation.mutationKey
    ) {
      resolved = options;
    } else {
      resolved = { ...options, mutation: { ...options.mutation, mutationKey } };
    }
    const { mutation: mutationOptions, request: requestOptions } = resolved;
    // Adapt react-query's single-variables call shape to the positional fetcher.
    const mutationFn: MutationFunction<
      Awaited<
        ReturnType<
          typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
        >
      >,
      { groupId: string; data: BoardGroupHeartbeatApply }
    > = (props) => {
      const { groupId, data } = props ?? {};
      return applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost(
        groupId,
        data,
        requestOptions,
      );
    };
    return { mutationFn, ...mutationOptions };
  };
// Convenience aliases for consumers typing handlers and request bodies.
export type ApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationResult =
  NonNullable<
    Awaited<
      ReturnType<
        typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
      >
    >
  >;
export type ApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationBody =
  BoardGroupHeartbeatApply;
export type ApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationError =
  HTTPValidationError;
/**
 * React-query mutation hook for applying heartbeat settings to a board group.
 * @summary Apply Board Group Heartbeat
 */
export const useApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost = <
  TError = HTTPValidationError,
  TContext = unknown,
>(
  options?: {
    mutation?: UseMutationOptions<
      Awaited<
        ReturnType<
          typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
        >
      >,
      TError,
      { groupId: string; data: BoardGroupHeartbeatApply },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseMutationResult<
  Awaited<
    ReturnType<
      typeof applyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPost
    >
  >,
  TError,
  { groupId: string; data: BoardGroupHeartbeatApply },
  TContext
> => {
  return useMutation(
    getApplyBoardGroupHeartbeatApiV1BoardGroupsGroupIdHeartbeatPostMutationOptions(
      options,
    ),
    queryClient,
  );
};

View File

@@ -274,298 +274,6 @@ export function useGetOnboardingApiV1BoardsBoardIdOnboardingGet<
return { ...query, queryKey: queryOptions.queryKey };
}
/**
 * Start onboarding and send instructions to the gateway agent.
 * @summary Start Onboarding
 */
// 200: onboarding read-model payload.
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse200 = {
  data: BoardOnboardingRead;
  status: 200;
};
// 422: request validation error payload.
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse422 = {
  data: HTTPValidationError;
  status: 422;
};
// Success/error variants additionally expose the raw response Headers.
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponseSuccess =
  startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse200 & {
    headers: Headers;
  };
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponseError =
  startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse422 & {
    headers: Headers;
  };
// Overall response union, discriminated by the `status` literal.
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse =
  | startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponseSuccess
  | startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponseError;
/** Build the onboarding-start endpoint path for a board. */
export const getStartOnboardingApiV1BoardsBoardIdOnboardingStartPostUrl = (
  boardId: string,
) => `/api/v1/boards/${boardId}/onboarding/start`;
/**
 * POST an onboarding-start request for a board.
 * JSON-encodes the payload; Content-Type is set before the caller's
 * headers are spread, so a caller-provided Content-Type wins.
 */
export const startOnboardingApiV1BoardsBoardIdOnboardingStartPost = async (
  boardId: string,
  boardOnboardingStart: BoardOnboardingStart,
  options?: RequestInit,
): Promise<startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse> => {
  const requestInit: RequestInit = {
    ...options,
    method: "POST",
    headers: { "Content-Type": "application/json", ...options?.headers },
    body: JSON.stringify(boardOnboardingStart),
  };
  return customFetch<startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse>(
    getStartOnboardingApiV1BoardsBoardIdOnboardingStartPostUrl(boardId),
    requestInit,
  );
};
/**
 * Build react-query mutation options for the onboarding-start POST.
 * Injects a stable default `mutationKey` unless the caller pins one and
 * forwards `options.request` to the fetcher.
 */
export const getStartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationOptions =
  <TError = HTTPValidationError, TContext = unknown>(options?: {
    mutation?: UseMutationOptions<
      Awaited<
        ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
      >,
      TError,
      { boardId: string; data: BoardOnboardingStart },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  }): UseMutationOptions<
    Awaited<
      ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
    >,
    TError,
    { boardId: string; data: BoardOnboardingStart },
    TContext
  > => {
    const mutationKey = ["startOnboardingApiV1BoardsBoardIdOnboardingStartPost"];
    // Reuse caller options verbatim when they already pin a mutationKey;
    // otherwise merge in the default key (or synthesize options entirely).
    let resolved;
    if (!options) {
      resolved = { mutation: { mutationKey }, request: undefined };
    } else if (
      options.mutation &&
      "mutationKey" in options.mutation &&
      options.mutation.mutationKey
    ) {
      resolved = options;
    } else {
      resolved = { ...options, mutation: { ...options.mutation, mutationKey } };
    }
    const { mutation: mutationOptions, request: requestOptions } = resolved;
    // Adapt react-query's single-variables call shape to the positional fetcher.
    const mutationFn: MutationFunction<
      Awaited<
        ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
      >,
      { boardId: string; data: BoardOnboardingStart }
    > = (props) => {
      const { boardId, data } = props ?? {};
      return startOnboardingApiV1BoardsBoardIdOnboardingStartPost(
        boardId,
        data,
        requestOptions,
      );
    };
    return { mutationFn, ...mutationOptions };
  };
// Convenience aliases for consumers typing handlers and request bodies.
export type StartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationResult =
  NonNullable<
    Awaited<
      ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
    >
  >;
export type StartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationBody =
  BoardOnboardingStart;
export type StartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationError =
  HTTPValidationError;
/**
 * React-query mutation hook for starting board onboarding.
 * @summary Start Onboarding
 */
export const useStartOnboardingApiV1BoardsBoardIdOnboardingStartPost = <
  TError = HTTPValidationError,
  TContext = unknown,
>(
  options?: {
    mutation?: UseMutationOptions<
      Awaited<
        ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
      >,
      TError,
      { boardId: string; data: BoardOnboardingStart },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseMutationResult<
  Awaited<
    ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
  >,
  TError,
  { boardId: string; data: BoardOnboardingStart },
  TContext
> => {
  return useMutation(
    getStartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationOptions(
      options,
    ),
    queryClient,
  );
};
/**
 * Send a user onboarding answer to the gateway agent.
 * @summary Answer Onboarding
 */
// 200: onboarding read-model payload.
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse200 =
  {
    data: BoardOnboardingRead;
    status: 200;
  };
// 422: request validation error payload.
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse422 =
  {
    data: HTTPValidationError;
    status: 422;
  };
// Success/error variants additionally expose the raw response Headers.
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponseSuccess =
  answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse200 & {
    headers: Headers;
  };
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponseError =
  answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse422 & {
    headers: Headers;
  };
// Overall response union, discriminated by the `status` literal.
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse =
  | answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponseSuccess
  | answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponseError;
/** Build the onboarding-answer endpoint path for a board. */
export const getAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostUrl = (
  boardId: string,
) => `/api/v1/boards/${boardId}/onboarding/answer`;
/**
 * POST a user's onboarding answer for a board.
 * JSON-encodes the payload; Content-Type is set before the caller's
 * headers are spread, so a caller-provided Content-Type wins.
 */
export const answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost = async (
  boardId: string,
  boardOnboardingAnswer: BoardOnboardingAnswer,
  options?: RequestInit,
): Promise<answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse> => {
  const requestInit: RequestInit = {
    ...options,
    method: "POST",
    headers: { "Content-Type": "application/json", ...options?.headers },
    body: JSON.stringify(boardOnboardingAnswer),
  };
  return customFetch<answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse>(
    getAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostUrl(boardId),
    requestInit,
  );
};
/**
 * Build react-query mutation options for the onboarding-answer POST.
 * Injects a stable default `mutationKey` unless the caller pins one and
 * forwards `options.request` to the fetcher.
 */
export const getAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationOptions =
  <TError = HTTPValidationError, TContext = unknown>(options?: {
    mutation?: UseMutationOptions<
      Awaited<
        ReturnType<
          typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost
        >
      >,
      TError,
      { boardId: string; data: BoardOnboardingAnswer },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  }): UseMutationOptions<
    Awaited<
      ReturnType<typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost>
    >,
    TError,
    { boardId: string; data: BoardOnboardingAnswer },
    TContext
  > => {
    const mutationKey = [
      "answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost",
    ];
    // Reuse caller options verbatim when they already pin a mutationKey;
    // otherwise merge in the default key (or synthesize options entirely).
    let resolved;
    if (!options) {
      resolved = { mutation: { mutationKey }, request: undefined };
    } else if (
      options.mutation &&
      "mutationKey" in options.mutation &&
      options.mutation.mutationKey
    ) {
      resolved = options;
    } else {
      resolved = { ...options, mutation: { ...options.mutation, mutationKey } };
    }
    const { mutation: mutationOptions, request: requestOptions } = resolved;
    // Adapt react-query's single-variables call shape to the positional fetcher.
    const mutationFn: MutationFunction<
      Awaited<
        ReturnType<
          typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost
        >
      >,
      { boardId: string; data: BoardOnboardingAnswer }
    > = (props) => {
      const { boardId, data } = props ?? {};
      return answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost(
        boardId,
        data,
        requestOptions,
      );
    };
    return { mutationFn, ...mutationOptions };
  };
// Convenience aliases for consumers typing handlers and request bodies.
export type AnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationResult =
  NonNullable<
    Awaited<
      ReturnType<typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost>
    >
  >;
export type AnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationBody =
  BoardOnboardingAnswer;
export type AnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationError =
  HTTPValidationError;
/**
 * React-query mutation hook for submitting an onboarding answer.
 * @summary Answer Onboarding
 */
export const useAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost = <
  TError = HTTPValidationError,
  TContext = unknown,
>(
  options?: {
    mutation?: UseMutationOptions<
      Awaited<
        ReturnType<
          typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost
        >
      >,
      TError,
      { boardId: string; data: BoardOnboardingAnswer },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseMutationResult<
  Awaited<
    ReturnType<typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost>
  >,
  TError,
  { boardId: string; data: BoardOnboardingAnswer },
  TContext
> => {
  return useMutation(
    getAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationOptions(
      options,
    ),
    queryClient,
  );
};
/**
* Store onboarding updates submitted by the gateway agent.
* @summary Agent Onboarding Update
@@ -741,6 +449,156 @@ export const useAgentOnboardingUpdateApiV1BoardsBoardIdOnboardingAgentPost = <
queryClient,
);
};
/**
* Send a user onboarding answer to the gateway agent.
* @summary Answer Onboarding
*/
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse200 =
{
data: BoardOnboardingRead;
status: 200;
};
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse422 =
{
data: HTTPValidationError;
status: 422;
};
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponseSuccess =
answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse200 & {
headers: Headers;
};
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponseError =
answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse422 & {
headers: Headers;
};
export type answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse =
| answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponseSuccess
| answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponseError;
export const getAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostUrl = (
boardId: string,
) => {
return `/api/v1/boards/${boardId}/onboarding/answer`;
};
export const answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost = async (
boardId: string,
boardOnboardingAnswer: BoardOnboardingAnswer,
options?: RequestInit,
): Promise<answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse> => {
return customFetch<answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostResponse>(
getAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostUrl(boardId),
{
...options,
method: "POST",
headers: { "Content-Type": "application/json", ...options?.headers },
body: JSON.stringify(boardOnboardingAnswer),
},
);
};
export const getAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationOptions =
<TError = HTTPValidationError, TContext = unknown>(options?: {
mutation?: UseMutationOptions<
Awaited<
ReturnType<
typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost
>
>,
TError,
{ boardId: string; data: BoardOnboardingAnswer },
TContext
>;
request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
Awaited<
ReturnType<typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost>
>,
TError,
{ boardId: string; data: BoardOnboardingAnswer },
TContext
> => {
const mutationKey = [
"answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost",
];
const { mutation: mutationOptions, request: requestOptions } = options
? options.mutation &&
"mutationKey" in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey }, request: undefined };
const mutationFn: MutationFunction<
Awaited<
ReturnType<
typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost
>
>,
{ boardId: string; data: BoardOnboardingAnswer }
> = (props) => {
const { boardId, data } = props ?? {};
return answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost(
boardId,
data,
requestOptions,
);
};
return { mutationFn, ...mutationOptions };
};
export type AnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationResult =
NonNullable<
Awaited<
ReturnType<typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost>
>
>;
export type AnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationBody =
BoardOnboardingAnswer;
export type AnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationError =
HTTPValidationError;
/**
* @summary Answer Onboarding
*/
export const useAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost = <
TError = HTTPValidationError,
TContext = unknown,
>(
options?: {
mutation?: UseMutationOptions<
Awaited<
ReturnType<
typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost
>
>,
TError,
{ boardId: string; data: BoardOnboardingAnswer },
TContext
>;
request?: SecondParameter<typeof customFetch>;
},
queryClient?: QueryClient,
): UseMutationResult<
Awaited<
ReturnType<typeof answerOnboardingApiV1BoardsBoardIdOnboardingAnswerPost>
>,
TError,
{ boardId: string; data: BoardOnboardingAnswer },
TContext
> => {
return useMutation(
getAnswerOnboardingApiV1BoardsBoardIdOnboardingAnswerPostMutationOptions(
options,
),
queryClient,
);
};
/**
* Confirm onboarding results and provision the board lead agent.
* @summary Confirm Onboarding
@@ -895,3 +753,145 @@ export const useConfirmOnboardingApiV1BoardsBoardIdOnboardingConfirmPost = <
queryClient,
);
};
/**
* Start onboarding and send instructions to the gateway agent.
* @summary Start Onboarding
*/
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse200 = {
data: BoardOnboardingRead;
status: 200;
};
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse422 = {
data: HTTPValidationError;
status: 422;
};
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponseSuccess =
startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse200 & {
headers: Headers;
};
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponseError =
startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse422 & {
headers: Headers;
};
export type startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse =
| startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponseSuccess
| startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponseError;
export const getStartOnboardingApiV1BoardsBoardIdOnboardingStartPostUrl = (
boardId: string,
) => {
return `/api/v1/boards/${boardId}/onboarding/start`;
};
export const startOnboardingApiV1BoardsBoardIdOnboardingStartPost = async (
boardId: string,
boardOnboardingStart: BoardOnboardingStart,
options?: RequestInit,
): Promise<startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse> => {
return customFetch<startOnboardingApiV1BoardsBoardIdOnboardingStartPostResponse>(
getStartOnboardingApiV1BoardsBoardIdOnboardingStartPostUrl(boardId),
{
...options,
method: "POST",
headers: { "Content-Type": "application/json", ...options?.headers },
body: JSON.stringify(boardOnboardingStart),
},
);
};
export const getStartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationOptions =
<TError = HTTPValidationError, TContext = unknown>(options?: {
mutation?: UseMutationOptions<
Awaited<
ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
>,
TError,
{ boardId: string; data: BoardOnboardingStart },
TContext
>;
request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
Awaited<
ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
>,
TError,
{ boardId: string; data: BoardOnboardingStart },
TContext
> => {
const mutationKey = [
"startOnboardingApiV1BoardsBoardIdOnboardingStartPost",
];
const { mutation: mutationOptions, request: requestOptions } = options
? options.mutation &&
"mutationKey" in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey }, request: undefined };
const mutationFn: MutationFunction<
Awaited<
ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
>,
{ boardId: string; data: BoardOnboardingStart }
> = (props) => {
const { boardId, data } = props ?? {};
return startOnboardingApiV1BoardsBoardIdOnboardingStartPost(
boardId,
data,
requestOptions,
);
};
return { mutationFn, ...mutationOptions };
};
export type StartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationResult =
NonNullable<
Awaited<
ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
>
>;
export type StartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationBody =
BoardOnboardingStart;
export type StartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationError =
HTTPValidationError;
/**
* @summary Start Onboarding
*/
export const useStartOnboardingApiV1BoardsBoardIdOnboardingStartPost = <
TError = HTTPValidationError,
TContext = unknown,
>(
options?: {
mutation?: UseMutationOptions<
Awaited<
ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
>,
TError,
{ boardId: string; data: BoardOnboardingStart },
TContext
>;
request?: SecondParameter<typeof customFetch>;
},
queryClient?: QueryClient,
): UseMutationResult<
Awaited<
ReturnType<typeof startOnboardingApiV1BoardsBoardIdOnboardingStartPost>
>,
TError,
{ boardId: string; data: BoardOnboardingStart },
TContext
> => {
return useMutation(
getStartOnboardingApiV1BoardsBoardIdOnboardingStartPostMutationOptions(
options,
),
queryClient,
);
};

View File

@@ -363,6 +363,123 @@ export const useCreateBoardApiV1BoardsPost = <
queryClient,
);
};
/**
 * Delete a board and all dependent records.
 * @summary Delete Board
 */
// 200: plain OK acknowledgement body.
export type deleteBoardApiV1BoardsBoardIdDeleteResponse200 = {
  data: OkResponse;
  status: 200;
};
// 422: request validation error payload.
export type deleteBoardApiV1BoardsBoardIdDeleteResponse422 = {
  data: HTTPValidationError;
  status: 422;
};
// Success/error variants additionally expose the raw response Headers.
export type deleteBoardApiV1BoardsBoardIdDeleteResponseSuccess =
  deleteBoardApiV1BoardsBoardIdDeleteResponse200 & {
    headers: Headers;
  };
export type deleteBoardApiV1BoardsBoardIdDeleteResponseError =
  deleteBoardApiV1BoardsBoardIdDeleteResponse422 & {
    headers: Headers;
  };
// Overall response union, discriminated by the `status` literal.
export type deleteBoardApiV1BoardsBoardIdDeleteResponse =
  | deleteBoardApiV1BoardsBoardIdDeleteResponseSuccess
  | deleteBoardApiV1BoardsBoardIdDeleteResponseError;
/** Build the DELETE endpoint path for a single board. */
export const getDeleteBoardApiV1BoardsBoardIdDeleteUrl = (boardId: string) =>
  `/api/v1/boards/${boardId}`;
/**
 * Issue the DELETE request for a board.
 * Caller-supplied RequestInit fields are preserved; `method` is forced
 * to DELETE after the spread so it cannot be overridden.
 */
export const deleteBoardApiV1BoardsBoardIdDelete = async (
  boardId: string,
  options?: RequestInit,
): Promise<deleteBoardApiV1BoardsBoardIdDeleteResponse> => {
  const url = getDeleteBoardApiV1BoardsBoardIdDeleteUrl(boardId);
  const requestInit: RequestInit = { ...options, method: "DELETE" };
  return customFetch<deleteBoardApiV1BoardsBoardIdDeleteResponse>(
    url,
    requestInit,
  );
};
/**
 * Build react-query mutation options for the board DELETE call.
 * Injects a stable default `mutationKey` unless the caller pins one and
 * forwards `options.request` to the fetcher.
 */
export const getDeleteBoardApiV1BoardsBoardIdDeleteMutationOptions = <
  TError = HTTPValidationError,
  TContext = unknown,
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
    TError,
    { boardId: string },
    TContext
  >;
  request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
  Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
  TError,
  { boardId: string },
  TContext
> => {
  const mutationKey = ["deleteBoardApiV1BoardsBoardIdDelete"];
  // Reuse caller options verbatim when they already pin a mutationKey;
  // otherwise merge in the default key (or synthesize options entirely).
  let resolved;
  if (!options) {
    resolved = { mutation: { mutationKey }, request: undefined };
  } else if (
    options.mutation &&
    "mutationKey" in options.mutation &&
    options.mutation.mutationKey
  ) {
    resolved = options;
  } else {
    resolved = { ...options, mutation: { ...options.mutation, mutationKey } };
  }
  const { mutation: mutationOptions, request: requestOptions } = resolved;
  // Adapt react-query's single-variables call shape to the positional fetcher.
  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
    { boardId: string }
  > = (props) => {
    const { boardId } = props ?? {};
    return deleteBoardApiV1BoardsBoardIdDelete(boardId, requestOptions);
  };
  return { mutationFn, ...mutationOptions };
};
// Resolved (non-nullable) result type of the delete-board mutation.
export type DeleteBoardApiV1BoardsBoardIdDeleteMutationResult = NonNullable<
  Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>
>;
// Default error type surfaced by the delete-board mutation.
export type DeleteBoardApiV1BoardsBoardIdDeleteMutationError =
  HTTPValidationError;
/**
 * React hook wrapping the board-delete endpoint in a TanStack Query mutation.
 * @summary Delete Board
 */
export const useDeleteBoardApiV1BoardsBoardIdDelete = <
  TError = HTTPValidationError,
  TContext = unknown,
>(
  options?: {
    mutation?: UseMutationOptions<
      Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
      TError,
      { boardId: string },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseMutationResult<
  Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
  TError,
  { boardId: string },
  TContext
> => {
  const mutationOptions =
    getDeleteBoardApiV1BoardsBoardIdDeleteMutationOptions(options);
  return useMutation(mutationOptions, queryClient);
};
/**
* Get a board by id.
* @summary Get Board
@@ -683,364 +800,10 @@ export const useUpdateBoardApiV1BoardsBoardIdPatch = <
queryClient,
);
};
/**
 * Delete a board and all dependent records.
 * @summary Delete Board
 */
// Successful deletion payload (HTTP 200).
export type deleteBoardApiV1BoardsBoardIdDeleteResponse200 = {
  data: OkResponse;
  status: 200;
};
// Validation-failure payload (HTTP 422).
export type deleteBoardApiV1BoardsBoardIdDeleteResponse422 = {
  data: HTTPValidationError;
  status: 422;
};
// Success variant with the raw response headers attached.
export type deleteBoardApiV1BoardsBoardIdDeleteResponseSuccess =
  deleteBoardApiV1BoardsBoardIdDeleteResponse200 & {
    headers: Headers;
  };
// Error variant with the raw response headers attached.
export type deleteBoardApiV1BoardsBoardIdDeleteResponseError =
  deleteBoardApiV1BoardsBoardIdDeleteResponse422 & {
    headers: Headers;
  };
// Union of all possible responses, discriminated by `status`.
export type deleteBoardApiV1BoardsBoardIdDeleteResponse =
  | deleteBoardApiV1BoardsBoardIdDeleteResponseSuccess
  | deleteBoardApiV1BoardsBoardIdDeleteResponseError;
/** Build the DELETE endpoint path for the given board id. */
export const getDeleteBoardApiV1BoardsBoardIdDeleteUrl = (boardId: string) =>
  `/api/v1/boards/${boardId}`;
/**
 * Issue the board-delete request through the project's customFetch wrapper.
 * Caller-supplied RequestInit fields are preserved; the method is forced to DELETE.
 */
export const deleteBoardApiV1BoardsBoardIdDelete = async (
  boardId: string,
  options?: RequestInit,
): Promise<deleteBoardApiV1BoardsBoardIdDeleteResponse> => {
  const url = getDeleteBoardApiV1BoardsBoardIdDeleteUrl(boardId);
  const init: RequestInit = { ...options, method: "DELETE" };
  return customFetch<deleteBoardApiV1BoardsBoardIdDeleteResponse>(url, init);
};
/**
 * Build TanStack Query mutation options for the board-delete endpoint.
 * A caller-provided mutationKey is respected; otherwise the default key is injected.
 */
export const getDeleteBoardApiV1BoardsBoardIdDeleteMutationOptions = <
  TError = HTTPValidationError,
  TContext = unknown,
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
    TError,
    { boardId: string },
    TContext
  >;
  request?: SecondParameter<typeof customFetch>;
}): UseMutationOptions<
  Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
  TError,
  { boardId: string },
  TContext
> => {
  const mutationKey = ["deleteBoardApiV1BoardsBoardIdDelete"];
  // Resolve effective options: no options -> defaults only; explicit
  // mutationKey -> respect caller; otherwise merge in the default key.
  const resolved = !options
    ? { mutation: { mutationKey }, request: undefined }
    : options.mutation &&
        "mutationKey" in options.mutation &&
        options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } };
  const { mutation: mutationOptions, request: requestOptions } = resolved;
  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
    { boardId: string }
  > = (props) => {
    const { boardId } = props ?? {};
    return deleteBoardApiV1BoardsBoardIdDelete(boardId, requestOptions);
  };
  return { mutationFn, ...mutationOptions };
};
// Resolved (non-nullable) result type of the delete-board mutation.
export type DeleteBoardApiV1BoardsBoardIdDeleteMutationResult = NonNullable<
  Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>
>;
// Default error type surfaced by the delete-board mutation.
export type DeleteBoardApiV1BoardsBoardIdDeleteMutationError =
  HTTPValidationError;
/**
 * React hook wrapping the board-delete endpoint in a TanStack Query mutation.
 * @summary Delete Board
 */
export const useDeleteBoardApiV1BoardsBoardIdDelete = <
  TError = HTTPValidationError,
  TContext = unknown,
>(
  options?: {
    mutation?: UseMutationOptions<
      Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
      TError,
      { boardId: string },
      TContext
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseMutationResult<
  Awaited<ReturnType<typeof deleteBoardApiV1BoardsBoardIdDelete>>,
  TError,
  { boardId: string },
  TContext
> => {
  const mutationOptions =
    getDeleteBoardApiV1BoardsBoardIdDeleteMutationOptions(options);
  return useMutation(mutationOptions, queryClient);
};
/**
 * Get a board snapshot view model.
 * @summary Get Board Snapshot
 */
// Successful snapshot payload (HTTP 200).
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse200 = {
  data: BoardSnapshot;
  status: 200;
};
// Validation-failure payload (HTTP 422).
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse422 = {
  data: HTTPValidationError;
  status: 422;
};
// Success variant with the raw response headers attached.
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponseSuccess =
  getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse200 & {
    headers: Headers;
  };
// Error variant with the raw response headers attached.
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponseError =
  getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse422 & {
    headers: Headers;
  };
// Union of all possible responses, discriminated by `status`.
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse =
  | getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponseSuccess
  | getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponseError;
/** Build the snapshot endpoint path for the given board id. */
export const getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetUrl = (
  boardId: string,
) => `/api/v1/boards/${boardId}/snapshot`;
/**
 * Fetch a board snapshot (GET) through the project's customFetch wrapper.
 */
export const getBoardSnapshotApiV1BoardsBoardIdSnapshotGet = async (
  boardId: string,
  options?: RequestInit,
): Promise<getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse> => {
  const url = getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetUrl(boardId);
  const init: RequestInit = { ...options, method: "GET" };
  return customFetch<getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse>(
    url,
    init,
  );
};
/** Stable TanStack Query cache key for a board's snapshot. */
export const getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryKey = (
  boardId: string,
) => [`/api/v1/boards/${boardId}/snapshot`] as const;
/**
 * Assemble TanStack Query options (key, fetcher, enabled flag) for the
 * board-snapshot query. Caller-provided query options override the defaults.
 */
export const getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryOptions = <
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    >;
    request?: SecondParameter<typeof customFetch>;
  },
) => {
  const queryOptions = options?.query;
  const requestOptions = options?.request;
  // Respect an explicit queryKey; otherwise derive the canonical one.
  const queryKey =
    queryOptions?.queryKey ??
    getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryKey(boardId);
  const queryFn: QueryFunction<
    Awaited<ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>>
  > = ({ signal }) =>
    getBoardSnapshotApiV1BoardsBoardIdSnapshotGet(boardId, {
      signal,
      ...requestOptions,
    });
  // The query is disabled until a boardId is supplied.
  return {
    queryKey,
    queryFn,
    enabled: !!boardId,
    ...queryOptions,
  } as UseQueryOptions<
    Awaited<ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>>,
    TError,
    TData
  > & { queryKey: DataTag<QueryKey, TData, TError> };
};
// Resolved (non-nullable) result type of the board-snapshot query.
export type GetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryResult =
  NonNullable<
    Awaited<ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>>
  >;
// Default error type surfaced by the board-snapshot query.
export type GetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryError =
  HTTPValidationError;
// Overload: caller supplies a defined `initialData`, so the result's `data`
// is always present (DefinedUseQueryResult).
export function useGetBoardSnapshotApiV1BoardsBoardIdSnapshotGet<
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options: {
    query: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    > &
      Pick<
        DefinedInitialDataOptions<
          Awaited<
            ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
          >,
          TError,
          Awaited<
            ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
          >
        >,
        "initialData"
      >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): DefinedUseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
};
// Overload: `initialData` may be undefined, so `data` can be undefined until
// the first fetch resolves.
export function useGetBoardSnapshotApiV1BoardsBoardIdSnapshotGet<
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    > &
      Pick<
        UndefinedInitialDataOptions<
          Awaited<
            ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
          >,
          TError,
          Awaited<
            ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
          >
        >,
        "initialData"
      >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
};
// Overload: no initialData constraint at all.
export function useGetBoardSnapshotApiV1BoardsBoardIdSnapshotGet<
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
};
/**
 * @summary Get Board Snapshot
 */
// Implementation: builds the canonical query options and delegates to
// useQuery, re-exposing the derived queryKey for cache interactions.
export function useGetBoardSnapshotApiV1BoardsBoardIdSnapshotGet<
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
} {
  const queryOptions =
    getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryOptions(
      boardId,
      options,
    );
  const query = useQuery(queryOptions, queryClient) as UseQueryResult<
    TData,
    TError
  > & { queryKey: DataTag<QueryKey, TData, TError> };
  return { ...query, queryKey: queryOptions.queryKey };
}
/**
* Get a grouped snapshot across related boards.
Returns high-signal cross-board status for dependency and overlap checks.
* @summary Get Board Group Snapshot
*/
export type getBoardGroupSnapshotApiV1BoardsBoardIdGroupSnapshotGetResponse200 =
@@ -1341,3 +1104,242 @@ export function useGetBoardGroupSnapshotApiV1BoardsBoardIdGroupSnapshotGet<
return { ...query, queryKey: queryOptions.queryKey };
}
/**
 * Get a board snapshot view model.
 * @summary Get Board Snapshot
 */
// Successful snapshot payload (HTTP 200).
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse200 = {
  data: BoardSnapshot;
  status: 200;
};
// Validation-failure payload (HTTP 422).
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse422 = {
  data: HTTPValidationError;
  status: 422;
};
// Success variant with the raw response headers attached.
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponseSuccess =
  getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse200 & {
    headers: Headers;
  };
// Error variant with the raw response headers attached.
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponseError =
  getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse422 & {
    headers: Headers;
  };
// Union of all possible responses, discriminated by `status`.
export type getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse =
  | getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponseSuccess
  | getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponseError;
/** Build the snapshot endpoint path for the given board id. */
export const getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetUrl = (
  boardId: string,
) => `/api/v1/boards/${boardId}/snapshot`;
/**
 * Fetch a board snapshot (GET) through the project's customFetch wrapper.
 */
export const getBoardSnapshotApiV1BoardsBoardIdSnapshotGet = async (
  boardId: string,
  options?: RequestInit,
): Promise<getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse> => {
  const url = getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetUrl(boardId);
  const init: RequestInit = { ...options, method: "GET" };
  return customFetch<getBoardSnapshotApiV1BoardsBoardIdSnapshotGetResponse>(
    url,
    init,
  );
};
/** Stable TanStack Query cache key for a board's snapshot. */
export const getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryKey = (
  boardId: string,
) => [`/api/v1/boards/${boardId}/snapshot`] as const;
/**
 * Assemble TanStack Query options (key, fetcher, enabled flag) for the
 * board-snapshot query. Caller-provided query options override the defaults.
 */
export const getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryOptions = <
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    >;
    request?: SecondParameter<typeof customFetch>;
  },
) => {
  const queryOptions = options?.query;
  const requestOptions = options?.request;
  // Respect an explicit queryKey; otherwise derive the canonical one.
  const queryKey =
    queryOptions?.queryKey ??
    getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryKey(boardId);
  const queryFn: QueryFunction<
    Awaited<ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>>
  > = ({ signal }) =>
    getBoardSnapshotApiV1BoardsBoardIdSnapshotGet(boardId, {
      signal,
      ...requestOptions,
    });
  // The query is disabled until a boardId is supplied.
  return {
    queryKey,
    queryFn,
    enabled: !!boardId,
    ...queryOptions,
  } as UseQueryOptions<
    Awaited<ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>>,
    TError,
    TData
  > & { queryKey: DataTag<QueryKey, TData, TError> };
};
// Resolved (non-nullable) result type of the board-snapshot query.
export type GetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryResult =
  NonNullable<
    Awaited<ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>>
  >;
// Default error type surfaced by the board-snapshot query.
export type GetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryError =
  HTTPValidationError;
// Overload: caller supplies a defined `initialData`, so the result's `data`
// is always present (DefinedUseQueryResult).
export function useGetBoardSnapshotApiV1BoardsBoardIdSnapshotGet<
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options: {
    query: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    > &
      Pick<
        DefinedInitialDataOptions<
          Awaited<
            ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
          >,
          TError,
          Awaited<
            ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
          >
        >,
        "initialData"
      >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): DefinedUseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
};
// Overload: `initialData` may be undefined, so `data` can be undefined until
// the first fetch resolves.
export function useGetBoardSnapshotApiV1BoardsBoardIdSnapshotGet<
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    > &
      Pick<
        UndefinedInitialDataOptions<
          Awaited<
            ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
          >,
          TError,
          Awaited<
            ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
          >
        >,
        "initialData"
      >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
};
// Overload: no initialData constraint at all.
export function useGetBoardSnapshotApiV1BoardsBoardIdSnapshotGet<
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
};
/**
 * @summary Get Board Snapshot
 */
// Implementation: builds the canonical query options and delegates to
// useQuery, re-exposing the derived queryKey for cache interactions.
export function useGetBoardSnapshotApiV1BoardsBoardIdSnapshotGet<
  TData = Awaited<
    ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
  >,
  TError = HTTPValidationError,
>(
  boardId: string,
  options?: {
    query?: Partial<
      UseQueryOptions<
        Awaited<
          ReturnType<typeof getBoardSnapshotApiV1BoardsBoardIdSnapshotGet>
        >,
        TError,
        TData
      >
    >;
    request?: SecondParameter<typeof customFetch>;
  },
  queryClient?: QueryClient,
): UseQueryResult<TData, TError> & {
  queryKey: DataTag<QueryKey, TData, TError>;
} {
  const queryOptions =
    getGetBoardSnapshotApiV1BoardsBoardIdSnapshotGetQueryOptions(
      boardId,
      options,
    );
  const query = useQuery(queryOptions, queryClient) as UseQueryResult<
    TData,
    TError
  > & { queryKey: DataTag<QueryKey, TData, TError> };
  return { ...query, queryKey: queryOptions.queryKey };
}

File diff suppressed because it is too large Load Diff

View File

@@ -9,10 +9,10 @@
* Serialized activity event payload returned by activity endpoints.
*/
// Fix: diff-render artifact left every member duplicated (a TS "Duplicate
// identifier" error); keep one copy of each, in the generator's alphabetical order.
export interface ActivityEventRead {
  agent_id: string | null;
  created_at: string;
  event_type: string;
  id: string;
  message: string | null;
  task_id: string | null;
}

View File

@@ -9,14 +9,14 @@
* Denormalized task-comment feed item enriched with task and board fields.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface ActivityTaskCommentFeedItemRead {
  agent_id: string | null;
  agent_name?: string | null;
  agent_role?: string | null;
  board_id: string;
  board_name: string;
  created_at: string;
  id: string;
  message: string | null;
  task_id: string;
  task_title: string;
}

View File

@@ -12,11 +12,11 @@ import type { AgentCreateIdentityProfile } from "./agentCreateIdentityProfile";
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface AgentCreate {
  board_id?: string | null;
  heartbeat_config?: AgentCreateHeartbeatConfig;
  identity_profile?: AgentCreateIdentityProfile;
  identity_template?: string | null;
  /** @minLength 1 */
  name: string;
  soul_template?: string | null;
  status?: string;
}

View File

@@ -9,8 +9,8 @@
* Heartbeat payload used to create an agent lazily.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface AgentHeartbeatCreate {
  board_id?: string | null;
  /** @minLength 1 */
  name: string;
  status?: string | null;
}

View File

@@ -12,19 +12,19 @@ import type { AgentReadIdentityProfile } from "./agentReadIdentityProfile";
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface AgentRead {
  board_id?: string | null;
  created_at: string;
  gateway_id: string;
  heartbeat_config?: AgentReadHeartbeatConfig;
  id: string;
  identity_profile?: AgentReadIdentityProfile;
  identity_template?: string | null;
  is_board_lead?: boolean;
  is_gateway_main?: boolean;
  last_seen_at: string | null;
  /** @minLength 1 */
  name: string;
  openclaw_session_id?: string | null;
  soul_template?: string | null;
  status?: string;
  updated_at: string;
}

View File

@@ -12,11 +12,11 @@ import type { AgentUpdateIdentityProfile } from "./agentUpdateIdentityProfile";
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface AgentUpdate {
  board_id?: string | null;
  heartbeat_config?: AgentUpdateHeartbeatConfig;
  identity_profile?: AgentUpdateIdentityProfile;
  identity_template?: string | null;
  is_gateway_main?: boolean | null;
  name?: string | null;
  soul_template?: string | null;
  status?: string | null;
}

View File

@@ -13,15 +13,15 @@ import type { ApprovalCreateStatus } from "./approvalCreateStatus";
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface ApprovalCreate {
  action_type: string;
  agent_id?: string | null;
  /**
   * @minimum 0
   * @maximum 100
   */
  confidence: number;
  payload?: ApprovalCreatePayload;
  rubric_scores?: ApprovalCreateRubricScores;
  status?: ApprovalCreateStatus;
  task_id?: string | null;
  task_ids?: string[];
}

View File

@@ -13,20 +13,20 @@ import type { ApprovalReadStatus } from "./approvalReadStatus";
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface ApprovalRead {
  action_type: string;
  agent_id?: string | null;
  board_id: string;
  /**
   * @minimum 0
   * @maximum 100
   */
  confidence: number;
  created_at: string;
  id: string;
  payload?: ApprovalReadPayload;
  resolved_at?: string | null;
  rubric_scores?: ApprovalReadRubricScores;
  status?: ApprovalReadStatus;
  task_id?: string | null;
  task_ids?: string[];
  task_titles?: string[];
}

View File

@@ -9,6 +9,6 @@
* Error detail payload listing blocking dependency task identifiers.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BlockedTaskDetail {
  blocked_by_task_ids?: string[];
  message: string;
}

View File

@@ -10,18 +10,19 @@ import type { BoardCreateSuccessMetrics } from "./boardCreateSuccessMetrics";
* Payload for creating a board.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardCreate {
  block_status_changes_with_pending_approval?: boolean;
  board_group_id?: string | null;
  board_type?: string;
  description: string;
  gateway_id?: string | null;
  goal_confirmed?: boolean;
  goal_source?: string | null;
  name: string;
  objective?: string | null;
  only_lead_can_change_status?: boolean;
  require_approval_for_done?: boolean;
  require_review_before_done?: boolean;
  slug: string;
  success_metrics?: BoardCreateSuccessMetrics;
  target_date?: string | null;
}

View File

@@ -9,7 +9,7 @@
* Payload for creating a board group.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardGroupCreate {
  description?: string | null;
  name: string;
  slug: string;
}

View File

@@ -10,6 +10,6 @@
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardGroupHeartbeatApply {
  every: string;
  include_board_leads?: boolean;
  target?: string | null;
}

View File

@@ -11,7 +11,7 @@ import type { BoardGroupHeartbeatApplyResultRequested } from "./boardGroupHeartb
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardGroupHeartbeatApplyResult {
  board_group_id: string;
  failed_agent_ids: string[];
  requested: BoardGroupHeartbeatApplyResultRequested;
  updated_agent_ids: string[];
}

View File

@@ -11,6 +11,6 @@
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardGroupMemoryCreate {
  /** @minLength 1 */
  content: string;
  source?: string | null;
  tags?: string[] | null;
}

View File

@@ -9,11 +9,11 @@
* Serialized board-group memory entry returned from read endpoints.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardGroupMemoryRead {
  board_group_id: string;
  content: string;
  created_at: string;
  id: string;
  is_chat?: boolean;
  source?: string | null;
  tags?: string[] | null;
}

View File

@@ -9,11 +9,11 @@
* Board-group payload returned from read endpoints.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardGroupRead {
  created_at: string;
  description?: string | null;
  id: string;
  name: string;
  organization_id: string;
  slug: string;
  updated_at: string;
}

View File

@@ -11,6 +11,6 @@ import type { BoardGroupRead } from "./boardGroupRead";
* Top-level board-group snapshot response payload.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardGroupSnapshot {
  boards?: BoardGroupBoardSnapshot[];
  group?: BoardGroupRead | null;
}

View File

@@ -10,17 +10,17 @@ import type { TagRef } from "./tagRef";
* Task summary row used inside board-group snapshot responses.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardGroupTaskSummary {
  assigned_agent_id?: string | null;
  assignee?: string | null;
  board_id: string;
  board_name: string;
  created_at: string;
  due_at?: string | null;
  id: string;
  in_progress_at?: string | null;
  priority: string;
  status: string;
  tags?: TagRef[];
  title: string;
  updated_at: string;
}

View File

@@ -9,7 +9,7 @@
* Payload for partial board-group updates.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardGroupUpdate {
  description?: string | null;
  name?: string | null;
  slug?: string | null;
}

View File

@@ -11,6 +11,6 @@
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardMemoryCreate {
  /** @minLength 1 */
  content: string;
  source?: string | null;
  tags?: string[] | null;
}

View File

@@ -9,11 +9,11 @@
* Serialized board memory entry returned from read endpoints.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardMemoryRead {
  board_id: string;
  content: string;
  created_at: string;
  id: string;
  is_chat?: boolean;
  source?: string | null;
  tags?: string[] | null;
}

View File

@@ -13,10 +13,10 @@ import type { BoardOnboardingUserProfile } from "./boardOnboardingUserProfile";
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardOnboardingAgentComplete {
  board_type: string;
  lead_agent?: BoardOnboardingLeadAgentDraft | null;
  objective?: string | null;
  status: "complete";
  success_metrics?: BoardOnboardingAgentCompleteSuccessMetrics;
  target_date?: string | null;
  user_profile?: BoardOnboardingUserProfile | null;
}

View File

@@ -10,8 +10,8 @@ import type { BoardOnboardingQuestionOption } from "./boardOnboardingQuestionOpt
* Question payload emitted by the onboarding assistant.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardOnboardingAgentQuestion {
  /** @minItems 1 */
  options: BoardOnboardingQuestionOption[];
  /** @minLength 1 */
  question: string;
}

View File

@@ -10,11 +10,11 @@ import type { BoardOnboardingLeadAgentDraftIdentityProfile } from "./boardOnboar
* Editable lead-agent draft configuration.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardOnboardingLeadAgentDraft {
  autonomy_level?: "ask_first" | "balanced" | "autonomous" | null;
  custom_instructions?: string | null;
  identity_profile?: BoardOnboardingLeadAgentDraftIdentityProfile;
  name?: string | null;
  output_format?: "bullets" | "mixed" | "narrative" | null;
  update_cadence?: "asap" | "hourly" | "daily" | "weekly" | null;
  verbosity?: "concise" | "balanced" | "detailed" | null;
}

View File

@@ -11,12 +11,12 @@ import type { BoardOnboardingReadMessages } from "./boardOnboardingReadMessages"
* Stored onboarding session state returned by API endpoints.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardOnboardingRead {
  board_id: string;
  created_at: string;
  draft_goal?: BoardOnboardingAgentComplete | null;
  id: string;
  messages?: BoardOnboardingReadMessages;
  session_key: string;
  status: string;
  updated_at: string;
}

View File

@@ -9,9 +9,9 @@
* User-profile preferences gathered during onboarding.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardOnboardingUserProfile {
  context?: string | null;
  notes?: string | null;
  preferred_name?: string | null;
  pronouns?: string | null;
  timezone?: string | null;
}

View File

@@ -10,23 +10,23 @@ import type { BoardReadSuccessMetrics } from "./boardReadSuccessMetrics";
* Board payload returned from read endpoints.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardRead {
  block_status_changes_with_pending_approval?: boolean;
  board_group_id?: string | null;
  board_type?: string;
  created_at: string;
  description: string;
  gateway_id?: string | null;
  goal_confirmed?: boolean;
  goal_source?: string | null;
  id: string;
  name: string;
  objective?: string | null;
  only_lead_can_change_status?: boolean;
  organization_id: string;
  require_approval_for_done?: boolean;
  require_review_before_done?: boolean;
  slug: string;
  success_metrics?: BoardReadSuccessMetrics;
  target_date?: string | null;
  updated_at: string;
}

View File

@@ -14,10 +14,10 @@ import type { TaskCardRead } from "./taskCardRead";
* Aggregated board payload used by board snapshot endpoints.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardSnapshot {
  agents: AgentRead[];
  approvals: ApprovalRead[];
  board: BoardRead;
  chat_messages: BoardMemoryRead[];
  pending_approvals_count?: number;
  tasks: TaskCardRead[];
}

View File

@@ -10,19 +10,19 @@ import type { BoardUpdateSuccessMetrics } from "./boardUpdateSuccessMetrics";
* Payload for partial board updates.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface BoardUpdate {
  block_status_changes_with_pending_approval?: boolean | null;
  board_group_id?: string | null;
  board_type?: string | null;
  description?: string | null;
  gateway_id?: string | null;
  goal_confirmed?: boolean | null;
  goal_source?: string | null;
  name?: string | null;
  objective?: string | null;
  only_lead_can_change_status?: boolean | null;
  require_approval_for_done?: boolean | null;
  require_review_before_done?: boolean | null;
  slug?: string | null;
  success_metrics?: BoardUpdateSuccessMetrics;
  target_date?: string | null;
}

View File

@@ -10,7 +10,7 @@
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface DashboardKpis {
  active_agents: number;
  error_rate_pct: number;
  median_cycle_time_hours_7d: number | null;
  tasks_in_progress: number;
}

View File

@@ -13,11 +13,11 @@ import type { DashboardWipSeriesSet } from "./dashboardWipSeriesSet";
* Complete dashboard metrics response payload.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface DashboardMetrics {
  cycle_time: DashboardSeriesSet;
  error_rate: DashboardSeriesSet;
  generated_at: string;
  kpis: DashboardKpis;
  range: DashboardMetricsRange;
  throughput: DashboardSeriesSet;
  wip: DashboardWipSeriesSet;
}

View File

@@ -12,7 +12,7 @@ import type { DashboardSeriesPoint } from "./dashboardSeriesPoint";
* Series payload for a single range/bucket combination.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface DashboardRangeSeries {
  bucket: DashboardRangeSeriesBucket;
  points: DashboardSeriesPoint[];
  range: DashboardRangeSeriesRange;
}

View File

@@ -10,6 +10,6 @@ import type { DashboardRangeSeries } from "./dashboardRangeSeries";
* Primary vs comparison pair for generic series metrics.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface DashboardSeriesSet {
  comparison: DashboardRangeSeries;
  primary: DashboardRangeSeries;
}

View File

@@ -9,9 +9,9 @@
* Work-in-progress point split by task status buckets.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface DashboardWipPoint {
  done: number;
  in_progress: number;
  inbox: number;
  period: string;
  review: number;
}

View File

@@ -12,7 +12,7 @@ import type { DashboardWipRangeSeriesRange } from "./dashboardWipRangeSeriesRang
* WIP series payload for a single range/bucket combination.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface DashboardWipRangeSeries {
  bucket: DashboardWipRangeSeriesBucket;
  points: DashboardWipPoint[];
  range: DashboardWipRangeSeriesRange;
}

View File

@@ -10,6 +10,6 @@ import type { DashboardWipRangeSeries } from "./dashboardWipRangeSeries";
* Primary vs comparison pair for WIP status series metrics.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface DashboardWipSeriesSet {
  comparison: DashboardWipRangeSeries;
  primary: DashboardWipRangeSeries;
}

View File

@@ -9,7 +9,7 @@
* Gateway command catalog and protocol metadata.
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface GatewayCommandsResponse {
  events: string[];
  methods: string[];
  protocol_version: number;
}

View File

@@ -10,7 +10,7 @@
*/
// Fix: deduplicated members (diff-render artifact); alphabetical order.
export interface GatewayCreate {
  name: string;
  token?: string | null;
  url: string;
  workspace_root: string;
}

View File

@@ -10,8 +10,8 @@
*/
/** Per-board outcome of a lead broadcast attempt. */
export interface GatewayLeadBroadcastBoardResult {
  board_id: string;
  /** Failure detail when `ok` is false. */
  error?: string | null;
  lead_agent_id?: string | null;
  lead_agent_name?: string | null;
  ok?: boolean;
}

View File

@@ -10,11 +10,11 @@ import type { GatewayLeadBroadcastRequestKind } from "./gatewayLeadBroadcastRequ
* Request payload for broadcasting a message to multiple board leads.
*/
/** Request payload for broadcasting a message to multiple board leads. */
export interface GatewayLeadBroadcastRequest {
  /** Target boards; null/omitted broadcasts to all boards. */
  board_ids?: string[] | null;
  /** @minLength 1 */
  content: string;
  correlation_id?: string | null;
  kind?: GatewayLeadBroadcastRequestKind;
  reply_source?: string | null;
  reply_tags?: string[];
}

View File

@@ -10,8 +10,8 @@ import type { GatewayLeadBroadcastBoardResult } from "./gatewayLeadBroadcastBoar
* Aggregate response for a lead broadcast operation.
*/
/** Aggregate response for a lead broadcast operation. */
export interface GatewayLeadBroadcastResponse {
  failed?: number;
  ok?: boolean;
  results?: GatewayLeadBroadcastBoardResult[];
  sent?: number;
}

View File

@@ -10,10 +10,10 @@ import type { GatewayLeadMessageRequestKind } from "./gatewayLeadMessageRequestK
* Request payload for sending a message to a board lead agent.
*/
/** Request payload for sending a message to a board lead agent. */
export interface GatewayLeadMessageRequest {
  /** @minLength 1 */
  content: string;
  correlation_id?: string | null;
  kind?: GatewayLeadMessageRequestKind;
  reply_source?: string | null;
  reply_tags?: string[];
}

View File

@@ -9,9 +9,9 @@
* Response payload for a lead-message dispatch attempt.
*/
/** Response payload for a lead-message dispatch attempt. */
export interface GatewayLeadMessageResponse {
  board_id: string;
  lead_agent_id?: string | null;
  lead_agent_name?: string | null;
  /** True when a lead agent was created as part of this dispatch. */
  lead_created?: boolean;
  ok?: boolean;
}

View File

@@ -9,10 +9,10 @@
* Request payload for asking the end user via a main gateway agent.
*/
/** Request payload for asking the end user via a main gateway agent. */
export interface GatewayMainAskUserRequest {
  /** @minLength 1 */
  content: string;
  correlation_id?: string | null;
  preferred_channel?: string | null;
  reply_source?: string | null;
  reply_tags?: string[];
}

View File

@@ -9,8 +9,8 @@
* Response payload for user-question dispatch via gateway main agent.
*/
/** Response payload for user-question dispatch via gateway main agent. */
export interface GatewayMainAskUserResponse {
  board_id: string;
  main_agent_id?: string | null;
  main_agent_name?: string | null;
  ok?: boolean;
}

View File

@@ -9,12 +9,12 @@
* Gateway payload returned from read endpoints.
*/
/** Gateway payload returned from read endpoints. */
export interface GatewayRead {
  created_at: string;
  id: string;
  name: string;
  organization_id: string;
  token?: string | null;
  updated_at: string;
  url: string;
  workspace_root: string;
}

View File

@@ -9,6 +9,6 @@
* Gateway sessions list response payload.
*/
/** Gateway sessions list response payload. */
export interface GatewaySessionsResponse {
  /** Session for the main agent, when one exists. */
  main_session?: unknown | null;
  sessions: unknown[];
}

View File

@@ -10,11 +10,11 @@ import type { GatewayTemplatesSyncError } from "./gatewayTemplatesSyncError";
* Summary payload returned by gateway template sync endpoints.
*/
/** Summary payload returned by gateway template sync endpoints. */
export interface GatewayTemplatesSyncResult {
  agents_skipped: number;
  agents_updated: number;
  errors?: GatewayTemplatesSyncError[];
  gateway_id: string;
  include_main: boolean;
  main_updated: boolean;
  reset_sessions: boolean;
}

View File

@@ -10,7 +10,7 @@
*/
/** Partial-update payload for a gateway; omitted/null fields are left unchanged. */
export interface GatewayUpdate {
  name?: string | null;
  token?: string | null;
  url?: string | null;
  workspace_root?: string | null;
}

View File

@@ -10,10 +10,10 @@
*/
/** Connectivity/status payload for a gateway. */
export interface GatewaysStatusResponse {
  connected: boolean;
  /** Connection-level failure detail when `connected` is false. */
  error?: string | null;
  gateway_url: string;
  main_session?: unknown | null;
  main_session_error?: string | null;
  sessions?: unknown[] | null;
  sessions_count?: number | null;
}

View File

@@ -188,14 +188,25 @@ export * from "./tagRead";
// Barrel re-exports for generated API model types.
// NOTE(review): entries appear to be kept in alphabetical order — preserve
// that ordering when the generator adds new model files.
export * from "./tagRef";
export * from "./tagUpdate";
export * from "./taskCardRead";
export * from "./taskCardReadCustomFieldValues";
export * from "./taskCardReadStatus";
export * from "./taskCommentCreate";
export * from "./taskCommentRead";
export * from "./taskCreate";
export * from "./taskCreateCustomFieldValues";
export * from "./taskCreateStatus";
export * from "./taskCustomFieldDefinitionCreate";
export * from "./taskCustomFieldDefinitionCreateFieldType";
export * from "./taskCustomFieldDefinitionCreateUiVisibility";
export * from "./taskCustomFieldDefinitionRead";
export * from "./taskCustomFieldDefinitionReadFieldType";
export * from "./taskCustomFieldDefinitionReadUiVisibility";
export * from "./taskCustomFieldDefinitionUpdate";
export * from "./taskRead";
export * from "./taskReadCustomFieldValues";
export * from "./taskReadStatus";
export * from "./taskUpdate";
export * from "./taskUpdateCustomFieldValues";
export * from "./updateAgentApiV1AgentsAgentIdPatchParams";
export * from "./userRead";
export * from "./userUpdate";

View File

@@ -8,10 +8,10 @@ import type { ActivityEventRead } from "./activityEventRead";
/** Limit/offset page of activity events. */
export interface LimitOffsetPageTypeVarCustomizedActivityEventRead {
  items: ActivityEventRead[];
  /** @minimum 1 */
  limit: number;
  /** @minimum 0 */
  offset: number;
  /** @minimum 0 */
  total: number;
}

View File

@@ -8,10 +8,10 @@ import type { ActivityTaskCommentFeedItemRead } from "./activityTaskCommentFeedI
/** Limit/offset page of task-comment feed items. */
export interface LimitOffsetPageTypeVarCustomizedActivityTaskCommentFeedItemRead {
  items: ActivityTaskCommentFeedItemRead[];
  /** @minimum 1 */
  limit: number;
  /** @minimum 0 */
  offset: number;
  /** @minimum 0 */
  total: number;
}

View File

@@ -8,10 +8,10 @@ import type { AgentRead } from "./agentRead";
/** Limit/offset page of agents. */
export interface LimitOffsetPageTypeVarCustomizedAgentRead {
  items: AgentRead[];
  /** @minimum 1 */
  limit: number;
  /** @minimum 0 */
  offset: number;
  /** @minimum 0 */
  total: number;
}

View File

@@ -8,10 +8,10 @@ import type { ApprovalRead } from "./approvalRead";
/** Limit/offset page of approvals. */
export interface LimitOffsetPageTypeVarCustomizedApprovalRead {
  items: ApprovalRead[];
  /** @minimum 1 */
  limit: number;
  /** @minimum 0 */
  offset: number;
  /** @minimum 0 */
  total: number;
}

View File

@@ -8,10 +8,10 @@ import type { BoardGroupMemoryRead } from "./boardGroupMemoryRead";
/** Limit/offset page of board-group memories. */
export interface LimitOffsetPageTypeVarCustomizedBoardGroupMemoryRead {
  items: BoardGroupMemoryRead[];
  /** @minimum 1 */
  limit: number;
  /** @minimum 0 */
  offset: number;
  /** @minimum 0 */
  total: number;
}

Some files were not shown because too many files have changed in this diff Show More