-
Notifications
You must be signed in to change notification settings - Fork 2
feat(management): add FastAPI routes and dependency injection (AIHCM-185) #303
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 5 commits
954a84e
d296a87
5103aa6
724aa0f
cfce711
74e6ab0
634108b
5b7d0a6
800e749
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,22 @@ | ||
"""Management presentation layer.

Aggregates sub-routers for knowledge graphs and data sources,
exporting a single management_router for registration in main.py.
"""

from __future__ import annotations

from fastapi import APIRouter

from management.presentation.data_sources.routes import router as ds_router
from management.presentation.knowledge_graphs.routes import router as kg_router

# Single entry point for the management API; main.py mounts this router
# and thereby picks up every sub-resource registered below.
management_router = APIRouter(prefix="/management", tags=["management"])

# Registration order determines route-listing order in the OpenAPI schema:
# knowledge graphs first, then data sources.
management_router.include_router(kg_router)
management_router.include_router(ds_router)

__all__ = ["management_router"]
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1 @@ | ||
| """Data source presentation sub-package.""" |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,155 @@ | ||
| """Request and response models for Data Source API endpoints.""" | ||
|
|
||
| from __future__ import annotations | ||
|
|
||
| from datetime import datetime | ||
|
|
||
| from pydantic import BaseModel, Field | ||
|
|
||
| from management.domain.aggregates import DataSource | ||
| from management.domain.entities import DataSourceSyncRun | ||
|
|
||
|
|
||
class CreateDataSourceRequest(BaseModel):
    """Request payload for creating a new data source.

    Attributes:
        name: Data source name (1-100 characters).
        adapter_type: Adapter type string (validated against
            DataSourceAdapterType in the route handler, not here).
        connection_config: Key-value connection configuration.
        credentials: Optional write-only credentials; never echoed
            back in any response.
    """

    name: str = Field(min_length=1, max_length=100)
    adapter_type: str
    connection_config: dict[str, str]
    credentials: dict[str, str] | None = None
|
|
||
|
|
||
class UpdateDataSourceRequest(BaseModel):
    """Request payload for a partial data-source update.

    Every field is optional; only the fields supplied are changed.

    Attributes:
        name: New name (1-100 characters), when provided.
        connection_config: Replacement connection configuration, when provided.
        credentials: Replacement credentials (write-only), when provided.
    """

    name: str | None = Field(default=None, min_length=1, max_length=100)
    connection_config: dict[str, str] | None = None
    credentials: dict[str, str] | None = None
|
|
||
|
|
||
class DataSourceResponse(BaseModel):
    """API representation of a data source.

    Credentials are never returned; has_credentials merely signals
    whether credentials have been configured.

    Attributes:
        id: Data source ID (ULID).
        knowledge_graph_id: Parent knowledge graph ID.
        tenant_id: Tenant this data source belongs to.
        name: Data source name.
        adapter_type: Adapter type string.
        connection_config: Connection configuration key-value pairs.
        has_credentials: Whether credentials are configured.
        schedule_type: Schedule type (manual, cron, interval).
        schedule_value: Schedule expression (None for manual).
        last_sync_at: Last successful sync timestamp, if any.
        created_at: Creation timestamp.
        updated_at: Last update timestamp.
    """

    id: str
    knowledge_graph_id: str
    tenant_id: str
    name: str
    adapter_type: str
    connection_config: dict[str, str]
    has_credentials: bool
    schedule_type: str
    schedule_value: str | None
    last_sync_at: datetime | None
    created_at: datetime
    updated_at: datetime

    @classmethod
    def from_domain(cls, ds: DataSource) -> DataSourceResponse:
        """Build an API response from a DataSource domain aggregate.

        Args:
            ds: Source domain aggregate.

        Returns:
            DataSourceResponse mirroring the aggregate's public state.
        """
        schedule = ds.schedule
        return cls(
            id=ds.id.value,
            knowledge_graph_id=ds.knowledge_graph_id,
            tenant_id=ds.tenant_id,
            name=ds.name,
            adapter_type=ds.adapter_type.value,
            connection_config=ds.connection_config,
            # Only the *presence* of credentials is exposed, never their value.
            has_credentials=ds.credentials_path is not None,
            schedule_type=schedule.schedule_type.value,
            schedule_value=schedule.value,
            last_sync_at=ds.last_sync_at,
            created_at=ds.created_at,
            updated_at=ds.updated_at,
        )
|
|
||
|
|
||
class DataSourceListResponse(BaseModel):
    """Paginated collection of data sources.

    Attributes:
        items: Data sources on the current page.
        total: Total number of data sources before pagination.
        offset: Number of items skipped.
        limit: Maximum number of items returned.
    """

    items: list[DataSourceResponse]
    total: int
    offset: int
    limit: int
|
|
||
|
|
||
class SyncRunResponse(BaseModel):
    """API representation of a single data-source sync run.

    Attributes:
        id: Sync run ID.
        data_source_id: Data source this sync run belongs to.
        status: Sync run status (pending, running, completed, failed).
        started_at: Timestamp at which the sync started.
        completed_at: Timestamp at which the sync finished
            (None while still in progress).
        created_at: Timestamp at which the record was created.
    """

    id: str
    data_source_id: str
    status: str
    started_at: datetime
    completed_at: datetime | None
    created_at: datetime

    @classmethod
    def from_domain(cls, sync_run: DataSourceSyncRun) -> SyncRunResponse:
        """Build an API response from a DataSourceSyncRun domain entity.

        Args:
            sync_run: Source domain entity.

        Returns:
            SyncRunResponse mirroring the entity's public state.
        """
        # NOTE(review): unlike adapter_type in DataSourceResponse.from_domain,
        # status is passed through without .value — confirm the domain field
        # is already a plain str rather than an enum.
        return cls(
            id=sync_run.id,
            data_source_id=sync_run.data_source_id,
            status=sync_run.status,
            started_at=sync_run.started_at,
            completed_at=sync_run.completed_at,
            created_at=sync_run.created_at,
        )
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Avoid writing secrets before the database write is known to succeed.
At Line 167 and Line 339,
`_secret_store.store()` runs before the database work has been flushed or committed. If `save()` or the commit then fails, create leaves an orphaned secret, and update can rotate credentials even though the request returns an error. It also keeps the transaction open across external I/O. Flush first, then write the secret, and add compensation if anything after the secret write fails. Also applies to lines 337-347.
🤖 Prompt for AI Agents