
SOLID/DRY Refactor + Spec-Driven Frontend Wiring — Implementation Plan

For agentic workers: REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (- [ ]) syntax for tracking.

Goal: Eliminate 250+ DRY violations in the backend, add typed response models, then generate a typed API client from the OpenAPI spec and wire all frontend pages to real backend endpoints.

Architecture: Backend gets a BaseRepository, DI factory generators, a generic ListResponse model, and consistent error classes. The OpenAPI spec is regenerated with proper types, then openapi-fetch + openapi-typescript produce a fully typed frontend client. All 11 mock-data pages switch to real API calls. Old hand-written clients and mock JSON files are deleted.

Tech Stack: Python 3.12 / FastAPI / Pydantic v2 / SQLAlchemy async / openapi-typescript / openapi-fetch / React 19 / TypeScript 5.9


Task 1: Backend Core Utilities

Files: - Create: backend/app/core/database/base_repository.py - Create: backend/app/core/responses.py - Modify: backend/app/core/exceptions.py - Modify: backend/app/core/dependencies.py - Create: backend/tests/core/test_base_repository.py - Create: backend/tests/core/test_responses.py

  • [ ] Step 1: Write test for BaseRepository
# backend/tests/core/test_base_repository.py
"""Tests for BaseRepository utility methods."""
import pytest
from unittest.mock import AsyncMock, MagicMock

from app.core.database.base_repository import BaseRepository
from app.core.exceptions import EntityCreationError, NotFoundError


def _make_session(rows):
    """Create a mock AsyncSession that returns the given rows."""
    result = MagicMock()
    result.first.return_value = rows[0] if rows else None
    result.__iter__ = lambda self: iter(rows)
    session = AsyncMock()
    session.execute.return_value = result
    return session


def _row(mapping: dict):
    """Create a mock row with _mapping attribute."""
    r = MagicMock()
    r._mapping = mapping
    return r


# Each test drives a BaseRepository against a mocked AsyncSession built by
# the helpers above, so no real database is needed.
@pytest.mark.asyncio
async def test_fetch_all_returns_list_of_dicts():
    # Rows are converted from Row._mapping to plain dicts, preserving order.
    rows = [_row({"id": 1, "name": "a"}), _row({"id": 2, "name": "b"})]
    session = _make_session(rows)
    repo = BaseRepository(session)
    result = await repo._fetch_all("SELECT 1", {})
    assert result == [{"id": 1, "name": "a"}, {"id": 2, "name": "b"}]


@pytest.mark.asyncio
async def test_fetch_all_empty():
    # An empty result set maps to an empty list, never None.
    session = _make_session([])
    repo = BaseRepository(session)
    result = await repo._fetch_all("SELECT 1", {})
    assert result == []


@pytest.mark.asyncio
async def test_fetch_one_returns_dict():
    # A single matching row comes back as a plain dict.
    session = _make_session([_row({"id": 1})])
    repo = BaseRepository(session)
    result = await repo._fetch_one("SELECT 1", {}, entity="thing")
    assert result == {"id": 1}


@pytest.mark.asyncio
async def test_fetch_one_raises_not_found():
    # No rows -> NotFoundError carrying the entity name.
    session = _make_session([])
    repo = BaseRepository(session)
    with pytest.raises(NotFoundError):
        await repo._fetch_one("SELECT 1", {}, entity="widget")


@pytest.mark.asyncio
async def test_fetch_one_or_none_returns_none():
    # The *_or_none variant swallows the miss instead of raising.
    session = _make_session([])
    repo = BaseRepository(session)
    result = await repo._fetch_one_or_none("SELECT 1", {})
    assert result is None


@pytest.mark.asyncio
async def test_fetch_one_or_none_returns_dict():
    session = _make_session([_row({"id": 5})])
    repo = BaseRepository(session)
    result = await repo._fetch_one_or_none("SELECT 1", {})
    assert result == {"id": 5}


@pytest.mark.asyncio
async def test_insert_returns_dict():
    # INSERT ... RETURNING hands back the created row as a dict.
    session = _make_session([_row({"id": 99})])
    repo = BaseRepository(session)
    result = await repo._insert("INSERT ...", {}, entity="item")
    assert result == {"id": 99}


@pytest.mark.asyncio
async def test_insert_raises_creation_error():
    # An insert that returns no row surfaces as EntityCreationError.
    session = _make_session([])
    repo = BaseRepository(session)
    with pytest.raises(EntityCreationError):
        await repo._insert("INSERT ...", {}, entity="item")
  • [ ] Step 2: Run test to verify it fails

cd backend && uv run pytest tests/core/test_base_repository.py -v
Expected: FAIL — ModuleNotFoundError: No module named 'app.core.database.base_repository'

  • [ ] Step 3: Create BaseRepository
# backend/app/core/database/base_repository.py
"""Base repository with shared query execution helpers."""
from __future__ import annotations

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.exceptions import EntityCreationError, NotFoundError


class BaseRepository:
    """Base class for PostgreSQL repositories.

    Absorbs the row-to-dict conversion and the not-found / creation-failure
    error handling that every repository implementation would otherwise
    repeat.
    """

    def __init__(self, session: AsyncSession) -> None:
        self.session = session

    async def _fetch_one(
        self, query: str, params: dict, *, entity: str = "record"
    ) -> dict:
        """Return exactly one row of *query* as a dict.

        Raises ``NotFoundError`` when the query yields nothing.
        """
        result = await self.session.execute(text(query), params)
        row = result.first()
        if row is None:
            raise NotFoundError(entity, "")
        return dict(row._mapping)

    async def _fetch_one_or_none(
        self, query: str, params: dict
    ) -> dict | None:
        """Return one row of *query* as a dict, or ``None`` when absent."""
        result = await self.session.execute(text(query), params)
        row = result.first()
        if not row:
            return None
        return dict(row._mapping)

    async def _fetch_all(
        self, query: str, params: dict | None = None
    ) -> list[dict]:
        """Return every row produced by *query* as a list of dicts."""
        result = await self.session.execute(text(query), params or {})
        return [dict(row._mapping) for row in result]

    async def _insert(
        self, query: str, params: dict, *, entity: str = "record"
    ) -> dict:
        """Run an INSERT/UPSERT with RETURNING and hand back the new row.

        Raises ``EntityCreationError`` when the statement returns no row.
        """
        result = await self.session.execute(text(query), params)
        row = result.first()
        if row is None:
            raise EntityCreationError(entity)
        return dict(row._mapping)
  • [ ] Step 4: Add EntityCreationError to exceptions.py

Add after the existing ValidationError class in backend/app/core/exceptions.py:

class EntityCreationError(SubstrateError):
    """Raised when an INSERT / UPSERT returns no row."""

    def __init__(self, entity: str):
        # Derive a machine-readable code and a human message from the entity.
        error_code = f"{entity.upper()}_CREATION_FAILED"
        error_message = f"Failed to create {entity}"
        super().__init__(code=error_code, message=error_message, status_code=500)
  • [ ] Step 5: Run BaseRepository tests

cd backend && uv run pytest tests/core/test_base_repository.py -v
Expected: All 8 tests PASS.

  • [ ] Step 6: Write test for list_response
# backend/tests/core/test_responses.py
"""Tests for response envelope helpers."""
from app.core.responses import list_response


def test_list_response_wraps_items():
    # The envelope preserves item order and reports the count in meta.total.
    items = [{"id": 1}, {"id": 2}]
    result = list_response(items)
    assert result == {"data": items, "meta": {"total": 2}}


def test_list_response_empty():
    # An empty list still yields the full envelope shape with total == 0.
    result = list_response([])
    assert result == {"data": [], "meta": {"total": 0}}
  • [ ] Step 7: Run test to verify it fails

cd backend && uv run pytest tests/core/test_responses.py -v
Expected: FAIL — ModuleNotFoundError: No module named 'app.core.responses'

  • [ ] Step 8: Create responses.py
# backend/app/core/responses.py
"""Shared response envelope helpers."""
from __future__ import annotations


def list_response(items: list) -> dict:
    """Return *items* wrapped in the standard ``{data, meta}`` envelope.

    ``meta.total`` always reflects the number of items supplied.
    """
    meta = {"total": len(items)}
    return {"data": items, "meta": meta}
  • [ ] Step 9: Run responses test

cd backend && uv run pytest tests/core/test_responses.py -v
Expected: PASS.

  • [ ] Step 10: Add DI factory generators to dependencies.py

Replace the full content of backend/app/core/dependencies.py:

"""Dependency injection wiring and factory generators."""
from __future__ import annotations

from typing import Any, Callable

from fastapi import Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database.postgres import get_session
from app.core.security import UserInfo, get_current_user


# ── convenience aliases ──────────────────────────────────────────────
async def get_db(session: AsyncSession = Depends(get_session)) -> AsyncSession:
    """Alias for get_session for clearer DI.

    Routers depend on ``get_db`` by name while the actual session factory
    stays in one place (``app.core.database.postgres``).
    """
    return session


async def get_user(user: UserInfo = Depends(get_current_user)) -> UserInfo:
    """Alias for get_current_user for clearer DI.

    Gives routers a short, intention-revealing dependency name for the
    authenticated user resolved by ``app.core.security``.
    """
    return user


# ── factory generators ──────────────────────────────────────────────
def repo_provider(repo_class: type) -> Callable:
    """Create a FastAPI dependency that yields ``repo_class(session)``."""

    def _provide(session: AsyncSession = Depends(get_session)) -> Any:
        return repo_class(session)

    # Give the closure a stable, class-specific name so FastAPI's
    # dependency introspection and debug output stay readable.
    provider_name = f"get_{repo_class.__name__}"
    _provide.__name__ = provider_name
    _provide.__qualname__ = provider_name
    return _provide


def service_provider(service_class: type, repo_factory: Callable) -> Callable:
    """Create a FastAPI dependency that yields ``service_class(repo)``."""

    def _provide(repo: Any = Depends(repo_factory)) -> Any:
        return service_class(repo)

    # Expose a class-specific name (e.g. "get_PolicyService") instead of
    # the generic closure name for FastAPI introspection and debugging.
    provider_name = f"get_{service_class.__name__}"
    _provide.__name__ = provider_name
    _provide.__qualname__ = provider_name
    return _provide
  • [ ] Step 11: Run all existing tests to verify nothing broke

cd backend && uv run pytest tests/ -v --tb=short
Expected: All existing tests PASS (we only added new code, nothing changed yet).

  • [ ] Step 12: Commit
cd backend
git add app/core/database/base_repository.py app/core/exceptions.py app/core/responses.py app/core/dependencies.py tests/core/test_base_repository.py tests/core/test_responses.py
git commit -m "feat: add BaseRepository, EntityCreationError, list_response, and DI factories"

Task 2: Migrate Simple Repositories to BaseRepository

These 8 repositories have straightforward patterns: only _fetch_all, _fetch_one_or_none, and _insert calls, no complex custom logic.

Files: - Modify: backend/app/modules/notification/repository.py - Modify: backend/app/modules/policy/repository.py - Modify: backend/app/modules/simulation/repository.py - Modify: backend/app/modules/queue/repository.py - Modify: backend/app/modules/pull_request/repository.py - Modify: backend/app/modules/community/repository.py - Modify: backend/app/modules/memory/repository.py - Modify: backend/app/modules/config/repository.py

Migration pattern

Every repository currently does:

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

class XRepository:
    def __init__(self, session: AsyncSession):
        self.session = session

    async def some_list(self):
        query = text("SELECT ...")
        result = await self.session.execute(query, {...})
        return [dict(r._mapping) for r in result]

    async def some_single(self):
        row = (await self.session.execute(text("..."), {...})).first()
        if row is None:
            raise RuntimeError("Failed to ...")
        return dict(row._mapping)

After migration:

from app.core.database.base_repository import BaseRepository

class XRepository(BaseRepository):
    # __init__ is inherited — no need to redefine it

    async def some_list(self):
        return await self._fetch_all("SELECT ...", {...})

    async def some_single(self):
        return await self._insert("...", {...}, entity="x")

Rules: - Remove from sqlalchemy import text and from sqlalchemy.ext.asyncio import AsyncSession - Remove the __init__ method (inherited from BaseRepository) - Replace await self.session.execute(text(q), p) + [dict(r._mapping) for r in result] → await self._fetch_all(q, p) - Replace single-row fetch + dict(row._mapping) → await self._fetch_one(q, p, entity="x") or await self._fetch_one_or_none(q, p) - Replace insert/upsert + if row is None: raise RuntimeError(...) + dict(row._mapping) → await self._insert(q, p, entity="x") - Keep query strings and parameter dicts exactly as they are - Exception: for raw statements that return no rows (e.g. DELETE), keep a direct session.execute with a function-local from sqlalchemy import text

  • [ ] Step 1: Migrate notification/repository.py

Replace full content of backend/app/modules/notification/repository.py:

"""Notification data access."""
from __future__ import annotations

from app.core.database.base_repository import BaseRepository


class NotificationRepository(BaseRepository):
    """Read-side data access for the notifications feed."""

    async def list_recent(self, limit: int = 50) -> list[dict]:
        """Return the newest notifications (up to *limit*), newest first."""
        sql = """
            SELECT id, title, body, severity, channel,
                   for_roles, created_at
            FROM notifications
            ORDER BY created_at DESC
            LIMIT :limit
        """
        params = {"limit": limit}
        return await self._fetch_all(sql, params)
  • [ ] Step 2: Migrate policy/repository.py

Replace full content of backend/app/modules/policy/repository.py:

"""Policy data access."""
from __future__ import annotations

from app.core.database.base_repository import BaseRepository


class PolicyRepository(BaseRepository):
    """Data access for policies and policy packs."""

    async def list_all(self) -> list[dict]:
        """Return every policy with its pack name and open-violation count."""
        sql = """
            SELECT p.id, p.name, pp.name AS pack, p.level,
                   (SELECT count(*) FROM policy_violations pv
                    WHERE pv.policy_id = p.id AND pv.status = 'open') AS viol,
                   p.active, p.description
            FROM policies p
            JOIN policy_packs pp ON pp.id = p.pack_id
            ORDER BY pp.name, p.name
        """
        return await self._fetch_all(sql)

    async def get_pack_names(self) -> list[dict]:
        """Return the distinct policy pack names, alphabetically."""
        sql = "SELECT DISTINCT name FROM policy_packs ORDER BY name"
        return await self._fetch_all(sql, {})
  • [ ] Step 3: Migrate simulation/repository.py

Replace full content of backend/app/modules/simulation/repository.py:

"""Simulation data access."""
from __future__ import annotations

from app.core.database.base_repository import BaseRepository


class SimulationRepository(BaseRepository):
    """Data access for simulation results."""

    async def get_latest(self) -> dict | None:
        """Return the most recent simulation result, or ``None`` when empty."""
        sql = """
            SELECT id, title, prompt, status, duration_label,
                   summary, before_badges, after_badges,
                   blast_radius_label, policy_delta, created_at
            FROM simulation_results
            ORDER BY created_at DESC
            LIMIT 1
        """
        return await self._fetch_one_or_none(sql, {})
  • [ ] Step 4: Migrate queue/repository.py

Replace full content of backend/app/modules/queue/repository.py:

"""Queue data access."""
from __future__ import annotations

from uuid import UUID

from app.core.database.base_repository import BaseRepository


class QueueRepository(BaseRepository):
    """Data access for the review queue."""

    async def list_pending(self) -> list[dict]:
        """Return pending queue items ordered by severity, then age."""
        sql = """
            SELECT id, entity_name, entity_type, flag_type,
                   confidence, severity, reason, status, created_at
            FROM queue_items
            WHERE status = 'pending'
            ORDER BY
                CASE severity WHEN 'critical' THEN 0
                              WHEN 'high' THEN 1
                              WHEN 'medium' THEN 2
                              ELSE 3 END,
                created_at
        """
        return await self._fetch_all(sql)

    async def update_status(
        self, item_id: UUID, status: str, resolution_note: str | None
    ) -> dict:
        """Resolve a queue item with *status* and return the updated row."""
        sql = """
            UPDATE queue_items
            SET status = :status, resolution_note = :note,
                resolved_at = now()
            WHERE id = CAST(:id AS uuid)
            RETURNING *
        """
        params = {"id": str(item_id), "status": status, "note": resolution_note}
        return await self._insert(sql, params, entity="queue_item")
  • [ ] Step 5: Migrate pull_request/repository.py

Replace full content of backend/app/modules/pull_request/repository.py:

"""Pull request data access."""
from __future__ import annotations

from app.core.database.base_repository import BaseRepository


class PullRequestRepository(BaseRepository):
    """Data access for pull requests and their evaluation history."""

    async def list_active(self) -> list[dict]:
        """Return open/in-review PRs, most recently updated first."""
        sql = """
            SELECT id, pr_number, title, author, repo, status,
                   violations, impact_score, blast_radius,
                   created_at, updated_at
            FROM pull_requests
            WHERE status IN ('open', 'review')
            ORDER BY updated_at DESC
        """
        return await self._fetch_all(sql)

    async def list_history(self) -> list[dict]:
        """Return the 50 most recent PR evaluation records."""
        sql = """
            SELECT id, pr_number, title, author, repo,
                   policies_evaluated, result, violations_found,
                   resolved_count, created_at
            FROM pull_request_history
            ORDER BY created_at DESC
            LIMIT 50
        """
        return await self._fetch_all(sql)
  • [ ] Step 6: Migrate community/repository.py

Replace full content of backend/app/modules/community/repository.py:

"""Community data access."""
from __future__ import annotations

from app.core.database.base_repository import BaseRepository


class CommunityRepository(BaseRepository):
    """Data access for communities and their linked services."""

    # list_all and get_by_slug differ only in their trailing WHERE / ORDER BY
    # clauses; the 15-line projection + join block is shared here instead of
    # being duplicated (DRY — the stated goal of this refactor).
    _COMMUNITY_SELECT = """
        SELECT
            c.id, c.name, c.slug, c.color, c.description,
            c.tension, c.violation_count, c.trend, c.trend_delta,
            COALESCE(
                array_agg(s.slug) FILTER (WHERE s.slug IS NOT NULL),
                ARRAY[]::TEXT[]
            ) as service_slugs,
            COALESCE(
                array_agg(s.name) FILTER (WHERE s.name IS NOT NULL),
                ARRAY[]::TEXT[]
            ) as service_names
        FROM communities c
        LEFT JOIN community_services cs ON cs.community_id = c.id
        LEFT JOIN services s ON s.id = cs.service_id
    """

    async def list_all(self) -> list[dict]:
        """Return every community with aggregated service slugs and names."""
        query = self._COMMUNITY_SELECT + " GROUP BY c.id ORDER BY c.name"
        return await self._fetch_all(query)

    async def get_by_slug(self, slug: str) -> dict | None:
        """Return one community by *slug*, or ``None`` when it does not exist."""
        query = self._COMMUNITY_SELECT + " WHERE c.slug = :slug GROUP BY c.id"
        return await self._fetch_one_or_none(query, {"slug": slug})

    async def get_create_data(self) -> dict:
        """Return the reference lists (teams, packs, services) that the
        community-creation form needs to render its selectors."""
        teams = await self._fetch_all(
            "SELECT id, name FROM teams ORDER BY name", {}
        )
        packs = await self._fetch_all(
            "SELECT id, name FROM policy_packs ORDER BY name", {}
        )
        services = await self._fetch_all(
            "SELECT slug, name FROM services ORDER BY name", {}
        )
        return {"teams": teams, "packs": packs, "services": services}
  • [ ] Step 7: Migrate memory/repository.py

Replace full content of backend/app/modules/memory/repository.py:

"""Memory data access."""
from __future__ import annotations

import json

from app.core.database.base_repository import BaseRepository


class MemoryRepository(BaseRepository):
    async def list_all(self) -> list[dict]:
        query = """
            SELECT id, entry_type, title, body, author_handle,
                   service_slug, created_at
            FROM memory_entries
            ORDER BY created_at DESC
        """
        return await self._fetch_all(query)

    async def get_stats(self) -> list[dict]:
        query = """
            SELECT entry_type AS label,
                   count(*)::int AS value
            FROM memory_entries
            GROUP BY entry_type
            ORDER BY value DESC
        """
        return await self._fetch_all(query)

    async def get_gaps(self) -> list[dict]:
        query = """
            SELECT s.name, s.slug,
                   count(me.id)::int AS entry_count
            FROM services s
            LEFT JOIN memory_entries me ON me.service_slug = s.slug
            GROUP BY s.id
            HAVING count(me.id) = 0
            ORDER BY s.name
            LIMIT 5
        """
        return await self._fetch_all(query)

    async def get_recent_submissions(self) -> list[dict]:
        query = """
            SELECT id, entry_type, title, author_handle,
                   service_slug, created_at
            FROM memory_entries
            ORDER BY created_at DESC
            LIMIT 5
        """
        return await self._fetch_all(query)

    async def get_service_slugs(self) -> list[dict]:
        return await self._fetch_all(
            "SELECT slug, name FROM services ORDER BY name", {}
        )

    async def create_entry(
        self,
        entry_type: str,
        title: str,
        body: str,
        author_handle: str,
        service_slug: str | None,
    ) -> dict:
        query = """
            INSERT INTO memory_entries
                (entry_type, title, body, author_handle, service_slug)
            VALUES (:entry_type, :title, :body, :author_handle, :service_slug)
            RETURNING id, entry_type, title, body, author_handle,
                      service_slug, created_at
        """
        return await self._insert(
            query,
            {
                "entry_type": entry_type,
                "title": title,
                "body": body,
                "author_handle": author_handle,
                "service_slug": service_slug,
            },
            entity="memory_entry",
        )
  • [ ] Step 8: Migrate config/repository.py

Replace full content of backend/app/modules/config/repository.py:

"""Config data access — per-org runtime settings."""
from __future__ import annotations

import json

from app.core.database.base_repository import BaseRepository


class ConfigRepository(BaseRepository):
    """Per-org runtime settings keyed by (org_id, domain)."""

    async def get_org_settings(self, org_id: str, domain: str) -> dict | None:
        """Return the stored config for (org, domain), or ``None`` when unset."""
        query = """
            SELECT config_json
            FROM org_settings
            WHERE org_id = CAST(:org_id AS uuid) AND domain = :domain
        """
        return await self._fetch_one_or_none(query, {"org_id": org_id, "domain": domain})

    async def upsert_org_settings(
        self, org_id: str, domain: str, config_json: dict, user_id: str
    ) -> dict:
        """Insert or update the settings row and return the stored version."""
        query = """
            INSERT INTO org_settings (org_id, domain, config_json, updated_by)
            VALUES (CAST(:org_id AS uuid), :domain, CAST(:config AS jsonb), :user_id)
            ON CONFLICT (org_id, domain)
            DO UPDATE SET config_json = CAST(:config AS jsonb),
                          updated_by = :user_id,
                          updated_at = now()
            RETURNING org_id, domain, config_json, updated_at
        """
        return await self._insert(
            query,
            {
                "org_id": org_id,
                "domain": domain,
                # Serialized here so the CAST(:config AS jsonb) bind is a string.
                "config": json.dumps(config_json),
                "user_id": user_id,
            },
            entity="org_settings",
        )

    async def delete_org_settings(self, org_id: str, domain: str) -> None:
        """Delete the settings row for (org, domain); no-op when absent."""
        # DELETE returns no rows, so the BaseRepository helpers (which expect
        # RETURNING data) don't apply. Use a raw execute with an explicit
        # function-local import — not the __import__("sqlalchemy") hack —
        # mirroring MarketplaceRepository.uninstall_module.
        from sqlalchemy import text

        await self.session.execute(
            text(
                "DELETE FROM org_settings WHERE org_id = CAST(:org_id AS uuid) AND domain = :domain"
            ),
            {"org_id": org_id, "domain": domain},
        )
  • [ ] Step 9: Run all tests

cd backend && uv run pytest tests/ -v --tb=short
Expected: All tests PASS. The repositories are drop-in replacements — same interface, same SQL, same return types.

  • [ ] Step 10: Commit
cd backend
git add app/modules/notification/repository.py app/modules/policy/repository.py app/modules/simulation/repository.py app/modules/queue/repository.py app/modules/pull_request/repository.py app/modules/community/repository.py app/modules/memory/repository.py app/modules/config/repository.py
git commit -m "refactor: migrate 8 simple repositories to BaseRepository"

Task 3: Migrate Complex Repositories to BaseRepository

These repositories have more methods, JSON casting, UUID casting, and multi-step operations. Same migration pattern as Task 2 but more methods per file.

Files: - Modify: backend/app/modules/auth/repository.py - Modify: backend/app/modules/billing/repository.py - Modify: backend/app/modules/licensing/repository.py - Modify: backend/app/modules/iam/repository.py - Modify: backend/app/modules/marketplace/repository.py - Modify: backend/app/modules/connectors/repository.py - Modify: backend/app/modules/policy_runtime/repository.py

The graph repository (backend/app/modules/graph/repository.py) uses Neo4j's AsyncSession, not SQLAlchemy — it stays unchanged.

  • [ ] Step 1: Migrate auth/repository.py

The auth repository has 5 methods. Replace the class declaration and all method bodies. Key changes: - class AuthRepository(BaseRepository): — remove custom __init__ - upsert_profile() → use self._insert(..., entity="user_profile") - list_tokens_for_user() → use self._fetch_all(...) - create_token_for_user() → use self._insert(..., entity="api_token") - rotate_token_for_user() → use self._insert(..., entity="api_token") - revoke_token_for_user() → use self._fetch_one(..., entity="api_token")

Change the imports at the top of the file to:

"""Auth data access — user profiles and API tokens."""
from __future__ import annotations

import hashlib
import json
import secrets
from datetime import datetime, timezone

from app.core.database.base_repository import BaseRepository

Change the class declaration from:

class AuthRepository:
    def __init__(self, session: AsyncSession):
        self.session = session
to:
class AuthRepository(BaseRepository):

Then in each method, replace the execution pattern. For example, in upsert_profile, replace:

result = await self.session.execute(text(query), params)
row = result.first()
if row is None:
    raise RuntimeError("Failed to upsert user profile")
return dict(row._mapping)
with:
return await self._insert(query, params, entity="user_profile")

And in list_tokens_for_user, replace:

result = await self.session.execute(text(query), {"profile_id": str(profile_id)})
return [dict(r._mapping) for r in result]
with:
return await self._fetch_all(query, {"profile_id": str(profile_id)})

Apply this pattern to every method. Keep all SQL strings and parameter dicts exactly as-is.

  • [ ] Step 2: Migrate billing/repository.py

Same pattern. Change imports to use BaseRepository. Change class to class BillingRepository(BaseRepository):. Remove __init__. For each method: - get_billing_account()self._fetch_one_or_none(...) - upsert_billing_account()self._insert(..., entity="billing_account") - create_transaction()self._insert(..., entity="transaction") - list_transactions()self._fetch_all(...) - list_invoices()self._fetch_all(...) - get_usage()self._fetch_one_or_none(...)

  • [ ] Step 3: Migrate licensing/repository.py

Same pattern. Methods: - get_org_license()self._fetch_one_or_none(...) - upsert_org_license()self._insert(..., entity="org_license") - list_entitlements()self._fetch_all(...) - check_entitlement()self._fetch_one_or_none(...)

  • [ ] Step 4: Migrate iam/repository.py

Second-largest repository (350 lines, 16 methods). Same pattern throughout: - All list_* methods → self._fetch_all(...) - All create_* / add_* methods → self._insert(..., entity="...") - get_user_profile_by_sub() → self._fetch_one_or_none(...) - org_exists() and team_belongs_to_org() → self._fetch_one_or_none(...) (check for None in service layer) - update_team_membership_role() → self._insert(...) (uses RETURNING)

  • [ ] Step 5: Migrate marketplace/repository.py

Largest repository (743 lines, 18 methods). Same pattern. Key methods: - list_catalog(), list_installed(), list_module_requests()self._fetch_all(...) - get_module_by_key(), get_active_entitlement(), get_latest_license(), get_installed_by_id()self._fetch_one_or_none(...) - create_purchase(), create_entitlement(), create_license(), upsert_install(), register_uploaded_module(), create_module_request()self._insert(..., entity="...") - update_installed_state(), update_installed_config(), update_module_request()self._insert(...) (uses RETURNING) - record_event()self._insert(..., entity="module_event") - uninstall_module() → raw self.session.execute() (DELETE, no return value)

For uninstall_module() which doesn't return data, keep the raw execute:

async def uninstall_module(self, org_id: str, installation_id: str) -> None:
    """Delete an installed-module row for the org; no-op when absent.

    DELETE returns no rows, so the BaseRepository helpers (which expect
    RETURNING data) don't apply — keep the raw execute with a local import.
    """
    from sqlalchemy import text
    await self.session.execute(
        text("DELETE FROM installed_modules WHERE org_id = CAST(:org_id AS uuid) AND id = CAST(:id AS uuid)"),
        {"org_id": org_id, "id": installation_id},
    )

  • [ ] Step 6: Migrate connectors/repository.py

Same pattern. Methods: - list_connector_catalog(), list_instances()self._fetch_all(...) - get_instance()self._fetch_one_or_none(...) - upsert_instance_config()self._insert(..., entity="connector_instance") - create_sync_run()self._insert(..., entity="sync_run") - complete_sync_run()self._insert(...) (UPDATE ... RETURNING) - ensure_instance_for_install()self._insert(..., entity="connector_instance")

  • [ ] Step 7: Migrate policy_runtime/repository.py

Same pattern. Methods: - list_bindings()self._fetch_all(...) - get_binding_by_pack_key()self._fetch_one_or_none(...) - ensure_binding()self._insert(..., entity="policy_pack_binding") - update_binding_config()self._insert(...) (UPDATE ... RETURNING) - log_decision()self._insert(..., entity="policy_decision") - enrich_binding_with_module()self._fetch_one_or_none(...)

  • [ ] Step 8: Run all tests

cd backend && uv run pytest tests/ -v --tb=short
Expected: All tests PASS.

  • [ ] Step 9: Commit
cd backend
git add app/modules/auth/repository.py app/modules/billing/repository.py app/modules/licensing/repository.py app/modules/iam/repository.py app/modules/marketplace/repository.py app/modules/connectors/repository.py app/modules/policy_runtime/repository.py
git commit -m "refactor: migrate 7 complex repositories to BaseRepository"

Task 4: Migrate Dependencies + Response Envelopes in Routers

Files: - Modify: 12 simple dependencies.py files (auth, community, policy, memory, queue, pull_request, simulation, notification, connectors, iam, config, licensing) - Modify: 8 routers that use manual {"data": ..., "meta": ...} envelopes (community, notification, policy, memory, queue, pull_request, dashboard, search)

Complex dependencies (marketplace, billing, policy_runtime, plugins, team_admin, dashboard, search, graph) keep custom factories — they have extra DI needs beyond repo_provider/service_provider.

  • [ ] Step 1: Migrate 12 simple dependencies.py files

For each of these modules, replace the full content of dependencies.py:

auth/dependencies.py:

"""Auth module dependency wiring."""
from app.core.dependencies import repo_provider, service_provider
from app.modules.auth.repository import AuthRepository
from app.modules.auth.service import AuthService

get_auth_repo = repo_provider(AuthRepository)
get_auth_service = service_provider(AuthService, get_auth_repo)

community/dependencies.py:

"""Community module DI wiring."""
from app.core.dependencies import repo_provider, service_provider
from app.modules.community.repository import CommunityRepository
from app.modules.community.service import CommunityService

get_community_repo = repo_provider(CommunityRepository)
get_community_service = service_provider(CommunityService, get_community_repo)

policy/dependencies.py:

"""Policy module DI wiring."""
from app.core.dependencies import repo_provider, service_provider
from app.modules.policy.repository import PolicyRepository
from app.modules.policy.service import PolicyService

get_policy_repo = repo_provider(PolicyRepository)
get_policy_service = service_provider(PolicyService, get_policy_repo)

memory/dependencies.py:

"""Memory module DI wiring."""
from app.core.dependencies import repo_provider, service_provider
from app.modules.memory.repository import MemoryRepository
from app.modules.memory.service import MemoryService

get_memory_repo = repo_provider(MemoryRepository)
get_memory_service = service_provider(MemoryService, get_memory_repo)

queue/dependencies.py:

"""Queue module DI wiring."""
from app.core.dependencies import repo_provider, service_provider
from app.modules.queue.repository import QueueRepository
from app.modules.queue.service import QueueService

get_queue_repo = repo_provider(QueueRepository)
get_queue_service = service_provider(QueueService, get_queue_repo)

pull_request/dependencies.py:

"""Pull request module DI wiring."""
from app.core.dependencies import repo_provider, service_provider
from app.modules.pull_request.repository import PullRequestRepository
from app.modules.pull_request.service import PullRequestService

get_pr_repo = repo_provider(PullRequestRepository)
get_pr_service = service_provider(PullRequestService, get_pr_repo)

simulation/dependencies.py:

"""Simulation module DI wiring."""
from app.core.dependencies import repo_provider, service_provider
from app.modules.simulation.repository import SimulationRepository
from app.modules.simulation.service import SimulationService

get_sim_repo = repo_provider(SimulationRepository)
get_sim_service = service_provider(SimulationService, get_sim_repo)

notification/dependencies.py:

"""Notification module DI wiring."""
from app.core.dependencies import repo_provider, service_provider
from app.modules.notification.repository import NotificationRepository
from app.modules.notification.service import NotificationService

get_notification_repo = repo_provider(NotificationRepository)
get_notification_service = service_provider(NotificationService, get_notification_repo)

connectors/dependencies.py:

"""Connector module dependencies."""
from fastapi import Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database.postgres import get_session
from app.core.dependencies import repo_provider
from app.modules.connectors.repository import ConnectorRepository
from app.modules.connectors.service import ConnectorService

get_connector_repo = repo_provider(ConnectorRepository)


def get_connector_service(
    repo: ConnectorRepository = Depends(get_connector_repo),
    session: AsyncSession = Depends(get_session),
) -> ConnectorService:
    return ConnectorService(repo, session)

iam/dependencies.py:

"""IAM module dependencies."""
from fastapi import Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database.postgres import get_session
from app.core.dependencies import repo_provider
from app.modules.iam.repository import IamRepository
from app.modules.iam.service import IamService

get_iam_repo = repo_provider(IamRepository)


def get_iam_service(
    repo: IamRepository = Depends(get_iam_repo),
    session: AsyncSession = Depends(get_session),
) -> IamService:
    return IamService(repo, session)

config/dependencies.py:

"""Dependency injection for config module."""
from app.core.dependencies import repo_provider
from app.modules.config.repository import ConfigRepository
from app.modules.config.service import ConfigService
from fastapi import Depends


get_config_repo = repo_provider(ConfigRepository)


def get_config_service(repo: ConfigRepository = Depends(get_config_repo)) -> ConfigService:
    return ConfigService(repo=repo)

licensing/dependencies.py — keeps custom factory (needs settings for token service):

"""Dependency injection for the licensing module."""
from __future__ import annotations

from fastapi import Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database.postgres import get_session
from app.core.dependencies import repo_provider
from app.modules.licensing.repository import LicensingRepository
from app.modules.licensing.service import LicenseTokenService, LicensingService
from app.settings import get_settings

get_licensing_repo = repo_provider(LicensingRepository)


def get_licensing_service(
    repo: LicensingRepository = Depends(get_licensing_repo),
) -> LicensingService:
    settings = get_settings()
    token_service = LicenseTokenService(
        private_key_pem=settings.marketplace_license_private_key_pem,
        public_key_pem=settings.marketplace_license_public_key_pem,
        kid=settings.marketplace_license_kid,
    )
    return LicensingService(repo=repo, token_service=token_service)

Remaining 4 modules (marketplace, billing, policy_runtime, plugins) keep their current custom factories — they have dependencies beyond repo_provider/service_provider (payment providers, OPA client, plugin registry). Use repo_provider for just the repo part where applicable.

  • [ ] Step 2: Replace response envelopes in routers

In the 8 router files listed above, replace {"data": items, "meta": {"total": len(items)}} with list_response(items). Six are shown below; apply the same one-line substitution to dashboard/router.py and search/router.py.

Add from app.core.responses import list_response to each file and replace the return statements.

community/router.py line 19:

# Before:
return {"data": communities, "meta": {"total": len(communities)}}
# After:
return list_response(communities)

notification/router.py line 17:

return list_response(notifications)

policy/router.py line 17:

return list_response(policies)

memory/router.py line 18:

return list_response(entries)

queue/router.py line 20:

return list_response(items)

pull_request/router.py lines 17 and 25:

return list_response(prs)
# ...
return list_response(history)

  • [ ] Step 3: Run all tests

cd backend && uv run pytest tests/ -v --tb=short
Expected: All tests PASS.

  • [ ] Step 4: Commit
cd backend
git add app/modules/*/dependencies.py app/modules/*/router.py
git commit -m "refactor: migrate DI to factory generators and use list_response helper"

Task 5: Regenerate OpenAPI Spec

Files: - Modify: api/generate-types.sh - Regenerate: api/openapi.yml - Regenerate: ui/src/types/openapi.generated.ts

  • [ ] Step 1: Fix generate-types.sh

The current script has issues: stray } on line 50, uses python3 instead of uv run python. Replace the full content of api/generate-types.sh:

#!/usr/bin/env bash
# Regenerate the OpenAPI spec from the FastAPI app, then the UI TypeScript types.
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
SPEC_DIR="${ROOT_DIR}/api"
SPEC_FILE="${SPEC_DIR}/openapi.json"
BACKEND_DIR="${ROOT_DIR}/backend"
UI_DIR="${ROOT_DIR}/ui"

mkdir -p "${SPEC_DIR}" "${UI_DIR}/src/types"

echo "[1/2] Generating OpenAPI JSON from FastAPI app..."
cd "${BACKEND_DIR}"
# Pass the target path as argv[1] rather than interpolating it into the
# Python source: a path containing quotes or braces would otherwise break
# the inline code string / f-string.
uv run python - "${SPEC_FILE}" <<'PY'
import json
import sys

from app.main import create_app

spec = create_app().openapi()
with open(sys.argv[1], "w") as f:
    json.dump(spec, f, indent=2)
print(f"wrote {sys.argv[1]}")
PY

echo "[2/2] Generating TypeScript types for UI..."
cd "${UI_DIR}"
npx --yes openapi-typescript "${SPEC_FILE}" --output "${UI_DIR}/src/types/api.generated.ts"

echo "Done. Updated:"
echo "  - ${SPEC_FILE}"
echo "  - ${UI_DIR}/src/types/api.generated.ts"
  • [ ] Step 2: Run the generation script

cd /home/dany/substrate && bash api/generate-types.sh
Expected: Outputs openapi.json and api.generated.ts.

  • [ ] Step 3: Verify the generated types contain expected paths

grep -c "'/api/v1/" ui/src/types/api.generated.ts
Expected: 20+ path entries matching the backend endpoints.

  • [ ] Step 4: Commit
git add api/generate-types.sh api/openapi.json ui/src/types/api.generated.ts
git commit -m "build: regenerate OpenAPI spec and TypeScript types from backend"

Task 6: Frontend Typed Client Setup

Files: - Modify: ui/package.json - Rewrite: ui/src/api/client.ts

  • [ ] Step 1: Install openapi-fetch
cd /home/dany/substrate/ui && npm install openapi-fetch
  • [ ] Step 2: Add generate script to package.json

Add to the scripts section of ui/package.json:

"generate:api": "openapi-typescript ../api/openapi.json -o src/types/api.generated.ts"
  • [ ] Step 3: Rewrite client.ts with openapi-fetch

Replace the full content of ui/src/api/client.ts:

/**
 * Typed API client generated from the OpenAPI spec.
 * All paths are fully typed — call api.GET("/api/v1/communities") etc.
 */
import createClient, { type Middleware } from 'openapi-fetch'
import type { paths } from '../types/api.generated'

// Accessor for the current access token; installed at runtime by AuthBridge.
let tokenAccessor: (() => string | undefined) | null = null

/** Register the function the client uses to read the current access token. */
export function setTokenAccessor(fn: () => string | undefined) {
  tokenAccessor = fn
}

/** Error type for non-2xx API responses. */
export class ApiError extends Error {
  constructor(
    public status: number,
    public body: string,
  ) {
    super(`API error ${status}: ${body}`)
    this.name = 'ApiError'
  }
}

// baseUrl stays empty: generated paths already include /api/v1, and the Vite
// dev proxy / Docker nginx forward those requests to the backend.
export const api = createClient<paths>({ baseUrl: '' })

// Attach a bearer token to every outgoing request when one is available.
api.use({
  async onRequest({ request }) {
    const token = tokenAccessor?.()
    if (token) {
      request.headers.set('Authorization', `Bearer ${token}`)
    }
    return request
  },
} satisfies Middleware)

Note: baseUrl is empty because paths in the generated types already include /api/v1. The Vite dev proxy and Docker nginx both handle forwarding /api/v1/* to the backend.

  • [ ] Step 4: Verify AuthBridge still works

Read ui/src/api/authBridge.ts — it imports setTokenAccessor from ./client. The import still works since client.ts still exports setTokenAccessor. No changes needed to authBridge.ts.

  • [ ] Step 5: Commit
cd /home/dany/substrate
git add ui/package.json ui/package-lock.json ui/src/api/client.ts
git commit -m "feat: set up openapi-fetch typed client with auth middleware"

Task 7: Migrate Mock-Data Pages to Real API

Replace all substrateApi.getX() mock calls with typed api.GET(...) calls in the 11 pages that use mock data, plus Topbar and Sidebar.

Files: - Modify: ui/src/pages/DashboardPage.tsx - Modify: ui/src/pages/CommunitiesPage.tsx - Modify: ui/src/pages/GraphPage.tsx - Modify: ui/src/pages/MemoryPage.tsx - Modify: ui/src/pages/SearchPage.tsx - Modify: ui/src/pages/QueuePage.tsx - Modify: ui/src/pages/SimulationPage.tsx - Modify: ui/src/pages/PullRequestsPage.tsx - Modify: ui/src/pages/CreateCommunityPage.tsx - Modify: ui/src/pages/CreatePolicyPage.tsx - Modify: ui/src/pages/ContributeMemoryPage.tsx - Modify: ui/src/app/Topbar.tsx - Modify: ui/src/app/Sidebar.tsx

Migration pattern

Every page currently does:

import { substrateApi } from '../api/substrateApi'

const { data, loading, error } = useApiData(substrateApi.getX, [])

After migration:

import { api } from '../api/client'

const { data, loading, error } = useApiData(
  () => api.GET('/api/v1/x').then(r => r.data),
  []
)

The useApiData hook accepts any () => Promise<T> function. We wrap the openapi-fetch call to extract .data from the response (openapi-fetch returns { data, error, response }).

For endpoints that return the {"data": [...], "meta": {...}} envelope, the page code that previously received the inner array now receives the envelope and must access .data:

// Before: data was Community[] directly
const communities = data

// After: data is { data: Community[], meta: { total: number } }
const communities = data?.data

  • [ ] Step 1: Migrate CommunitiesPage.tsx

In ui/src/pages/CommunitiesPage.tsx:

Replace the substrateApi import:

// Remove:
import { substrateApi } from '../api/substrateApi'
// Add:
import { api } from '../api/client'

Replace the data loading:

// Before:
const { data: communities, loading, error } = useApiData(substrateApi.getCommunities, [])

// After:
const { data: response, loading, error } = useApiData(
  () => api.GET('/api/v1/communities').then(r => r.data),
  []
)
const communities = response?.data

  • [ ] Step 2: Migrate MemoryPage.tsx

Replace import and data loading:

import { api } from '../api/client'

const { data: entriesResp, loading: el, error: ee } = useApiData(
  () => api.GET('/api/v1/memory').then(r => r.data),
  []
)
const entries = entriesResp?.data
const { data: meta, loading: ml, error: me } = useApiData(
  () => api.GET('/api/v1/memory/meta').then(r => r.data),
  []
)

  • [ ] Step 3: Migrate QueuePage.tsx
import { api } from '../api/client'

const { data: response, loading, error } = useApiData(
  () => api.GET('/api/v1/queue').then(r => r.data),
  []
)
const queueItems = response?.data
  • [ ] Step 4: Migrate PullRequestsPage.tsx
import { api } from '../api/client'

const { data: prResp, loading: pl, error: pe } = useApiData(
  () => api.GET('/api/v1/pull-requests').then(r => r.data),
  []
)
const prs = prResp?.data
const { data: histResp, loading: hl, error: he } = useApiData(
  () => api.GET('/api/v1/pull-requests/history').then(r => r.data),
  []
)
const history = histResp?.data

Remove the blastRadiusTemplateData import from '../data/pull-request-blast-radius.json' — keep it only if it's used as a local UI template (not API data). If the blast radius is only for local SVG rendering, it can stay.

  • [ ] Step 5: Migrate SearchPage.tsx
import { api } from '../api/client'

const { data, loading, error } = useApiData(
  () => api.GET('/api/v1/search/data').then(r => r.data),
  []
)
  • [ ] Step 6: Migrate SimulationPage.tsx
import { api } from '../api/client'

const { data, loading, error } = useApiData(
  () => api.GET('/api/v1/simulation').then(r => r.data),
  []
)
  • [ ] Step 7: Migrate DashboardPage.tsx
import { api } from '../api/client'

const { data, loading, error } = useApiData(
  () => api.GET('/api/v1/dashboard').then(r => r.data),
  [perspective]
)

Note: The dashboard endpoint doesn't currently accept a perspective query param. The page passes perspective to getDashboardData() which selects from mock JSON keyed by perspective. After migration, the backend returns a single dashboard — the perspective-based filtering is a Phase 1 feature.

  • [ ] Step 8: Migrate GraphPage.tsx
import { api } from '../api/client'

const { data: graphResp, loading: gl, error: ge } = useApiData(
  () => api.GET('/api/v1/communities/{slug}/graph', {
    params: { path: { slug: communitySlug || 'all' } }
  }).then(r => r.data),
  [communitySlug]
)
const nodes = graphResp?.data?.nodes
const edges = graphResp?.data?.edges

const { data: commResp, loading: cl, error: ce } = useApiData(
  () => api.GET('/api/v1/communities').then(r => r.data),
  []
)
const communities = commResp?.data
  • [ ] Step 9: Migrate CreateCommunityPage.tsx
import { api } from '../api/client'

const { data: formData, loading: fl, error: fe } = useApiData(
  () => api.GET('/api/v1/communities/create-data').then(r => r.data),
  []
)
const { data: commResp, loading: cl, error: ce } = useApiData(
  () => api.GET('/api/v1/communities').then(r => r.data),
  []
)
const communities = commResp?.data
  • [ ] Step 10: Migrate CreatePolicyPage.tsx
import { api } from '../api/client'

const { data, loading, error } = useApiData(
  () => api.GET('/api/v1/policies/create-data').then(r => r.data),
  []
)
  • [ ] Step 11: Migrate ContributeMemoryPage.tsx
import { api } from '../api/client'

const { data, loading, error } = useApiData(
  () => api.GET('/api/v1/memory/contribute').then(r => r.data),
  []
)
  • [ ] Step 12: Migrate Topbar.tsx

Replace:

import { substrateApi } from '../api/substrateApi'
with:
import { api } from '../api/client'

Replace notification loading:

const { data: notifications } = useApiData(
  () => api.GET('/api/v1/notifications').then(r => r.data?.data),
  [currentUser.role]
)

Replace brain health loading:

const { data: brainHealth } = useApiData(
  () => api.GET('/api/v1/dashboard').then(r => r.data),
  []
)

  • [ ] Step 13: Migrate Sidebar.tsx

Replace:

import { substrateApi } from '../api/substrateApi'
with:
import { api } from '../api/client'

Replace engine loading:

const { data: dashboard } = useApiData(
  () => api.GET('/api/v1/dashboard').then(r => r.data),
  []
)

  • [ ] Step 14: Verify build

cd /home/dany/substrate/ui && npm run build
Expected: TypeScript compilation succeeds with no errors.

  • [ ] Step 15: Commit
cd /home/dany/substrate
git add ui/src/pages/ ui/src/app/Topbar.tsx ui/src/app/Sidebar.tsx
git commit -m "feat: wire all mock-data pages to real backend API via typed client"

Task 8: Migrate Settings Pages to Typed Client

These pages already call real endpoints but use hand-written clients (billingApi, configApi, orgApi, substrateApi). Switch them to the typed api client.

Files: - Modify: ui/src/pages/settings/ApiTokensPage.tsx - Modify: ui/src/pages/settings/ProfileSettingsPage.tsx - Modify: ui/src/pages/settings/TeamSettingsPage.tsx - Modify: ui/src/pages/settings/BillingPage.tsx - Modify: ui/src/pages/settings/LlmConnectionsPage.tsx - Modify: ui/src/pages/settings/OrgSettingsPage.tsx - Modify: ui/src/pages/settings/PlatformDataPage.tsx - Modify: ui/src/pages/settings/PreferencesPage.tsx - Modify: ui/src/pages/settings/MarketplacePage.tsx - Modify: ui/src/pages/SettingsPage.tsx

Migration pattern

Replace imports and calls:

// Before:
import { billingApi } from '../../api/billingApi'
billingApi.getAccount()

// After:
import { api } from '../../api/client'
api.GET('/api/v1/billing/account').then(r => r.data)

For mutations:

// Before:
billingApi.configureAccount({ billing_email: email })

// After:
api.PUT('/api/v1/billing/account', { body: { billing_email: email } }).then(r => r.data)

  • [ ] Step 1: Migrate ApiTokensPage.tsx

Replace substrateApi import with api from '../../api/client'. Replace:
- substrateApi.listApiTokens() → api.GET('/api/v1/auth/tokens').then(r => r.data)
- substrateApi.createApiToken(payload) → api.POST('/api/v1/auth/tokens', { body: payload }).then(r => r.data)

  • [ ] Step 2: Migrate ProfileSettingsPage.tsx

Replace:
- substrateApi.getMyProfile() → api.GET('/api/v1/auth/me').then(r => r.data)

  • [ ] Step 3: Migrate TeamSettingsPage.tsx

Replace orgApi import with api. Replace:
- orgApi.getCurrentOrg() → api.GET('/api/v1/iam/me/access').then(r => r.data)
- orgApi.listOrgs() → api.GET('/api/v1/iam/orgs').then(r => r.data)
- orgApi.listTeams(orgId) → api.GET('/api/v1/iam/orgs/{org_id}/teams', { params: { path: { org_id: orgId } } }).then(r => r.data)

  • [ ] Step 4: Migrate BillingPage.tsx

Replace billingApi import with api. Replace:
- billingApi.getAccount() → api.GET('/api/v1/billing/account').then(r => r.data)
- billingApi.getUsage() → api.GET('/api/v1/billing/usage').then(r => r.data)
- billingApi.getTransactions() → api.GET('/api/v1/billing/transactions').then(r => r.data)

  • [ ] Step 5: Migrate LlmConnectionsPage.tsx

Replace configApi import with api. Replace:
- configApi.getConfig('llm_connections') → api.GET('/api/v1/config/{domain}', { params: { path: { domain: 'llm_connections' } } }).then(r => r.data)

  • [ ] Step 6: Migrate OrgSettingsPage.tsx

Replace configApi import with api. Replace:
- configApi.getConfig('org_profile') → api.GET('/api/v1/config/{domain}', { params: { path: { domain: 'org_profile' } } }).then(r => r.data)
- configApi.updateConfig('org_profile', data) → api.PATCH('/api/v1/config/{domain}', { params: { path: { domain: 'org_profile' } }, body: data }).then(r => r.data)

  • [ ] Step 7: Migrate PlatformDataPage.tsx

Same pattern as OrgSettingsPage — swap configApi for typed api calls with the domain as a path param.

  • [ ] Step 8: Migrate PreferencesPage.tsx

Same pattern — swap configApi.getConfig('notifications') etc. for typed api.GET calls.

  • [ ] Step 9: Migrate MarketplacePage.tsx

Replace:
- substrateApi.listMarketplaceCatalog() → api.GET('/api/v1/marketplace/catalog').then(r => r.data)
- substrateApi.listInstalledMarketplaceModules() → api.GET('/api/v1/marketplace/installed').then(r => r.data)

  • [ ] Step 10: Migrate SettingsPage.tsx

This is the largest page (~3000 lines). Replace all substrateApi.* calls: - Token operations: listApiTokens, createApiToken, rotateApiToken, revokeApiToken - Marketplace operations: listMarketplaceCatalog, listInstalledMarketplaceModules, purchaseMarketplaceModule, installMarketplaceModule, updateInstalledMarketplaceModule, configureInstalledMarketplaceModule - Connector operations: listConnectorInstances, upsertConnectorConfig, runConnectorSync - Policy operations: listPolicyPackBindings, updatePolicyPackBinding - Team admin: updateTeamMemberRole, updateTeamMemberPermissions

All use the same pattern: replace substrateApi.methodName(args) with the corresponding api.METHOD('/api/v1/path', { body/params }).

Also remove the type re-exports from substrateApi — import types directly from '../types/api.generated' instead.

  • [ ] Step 11: Verify build

cd /home/dany/substrate/ui && npm run build
Expected: TypeScript compilation succeeds.

  • [ ] Step 12: Commit
cd /home/dany/substrate
git add ui/src/pages/
git commit -m "refactor: migrate all settings pages to typed openapi-fetch client"

Task 9: Delete Dead Files

Remove mock data files, hand-written API clients, and old generated types that are no longer imported anywhere.

Files to delete: - ui/src/api/substrateApi.ts - ui/src/api/billingApi.ts - ui/src/api/configApi.ts - ui/src/api/licensingApi.ts - ui/src/api/orgApi.ts - ui/src/api/pluginsApi.ts - ui/src/data/dashboard.json - ui/src/data/communities.json - ui/src/data/graph.json - ui/src/data/memory.json - ui/src/data/search.json - ui/src/data/queue.json - ui/src/data/simulation.json - ui/src/data/pull-requests.json - ui/src/data/notifications.json - ui/src/data/policies.json - ui/src/data/engines.json - ui/src/types/openapi.generated.ts (replaced by api.generated.ts) - api/openapi.yml (replaced by openapi.json)

Files to keep: - ui/src/data/navigation.json — UI structure - ui/src/data/settings.json — settings layout - ui/src/data/settings-org-fields.json — form field definitions - ui/src/data/settings-platform-domains.json — platform config domains - ui/src/data/users.json — dev-mode users - ui/src/data/pull-request-blast-radius.json — local SVG template (check if still imported)

  • [ ] Step 1: Verify no remaining imports of deleted files

cd /home/dany/substrate/ui
grep -r "from.*substrateApi" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*billingApi" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*configApi" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*licensingApi" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*orgApi" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*pluginsApi" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*openapi\.generated" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*data/dashboard" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*data/communities" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*data/graph" src/ --include="*.ts" --include="*.tsx" || echo "clean"
grep -r "from.*data/engines" src/ --include="*.ts" --include="*.tsx" || echo "clean"
Expected: All "clean". If any imports remain, fix them first.

  • [ ] Step 2: Delete API client files
cd /home/dany/substrate
rm ui/src/api/substrateApi.ts ui/src/api/billingApi.ts ui/src/api/configApi.ts ui/src/api/licensingApi.ts ui/src/api/orgApi.ts ui/src/api/pluginsApi.ts
  • [ ] Step 3: Delete mock data files
rm ui/src/data/dashboard.json ui/src/data/communities.json ui/src/data/graph.json ui/src/data/memory.json ui/src/data/search.json ui/src/data/queue.json ui/src/data/simulation.json ui/src/data/pull-requests.json ui/src/data/notifications.json ui/src/data/policies.json ui/src/data/engines.json
  • [ ] Step 4: Delete old generated types and spec
rm ui/src/types/openapi.generated.ts
rm -f api/openapi.yml
  • [ ] Step 5: Verify build still passes

cd /home/dany/substrate/ui && npm run build
Expected: Clean build with no errors.

  • [ ] Step 6: Run backend tests

cd /home/dany/substrate/backend && uv run pytest tests/ -v --tb=short
Expected: All PASS.

  • [ ] Step 7: Commit
cd /home/dany/substrate
git add -A
git commit -m "chore: delete mock data files, hand-written API clients, and old generated types"