diff --git a/.env.test b/.env.test deleted file mode 100644 index 842950d2..00000000 --- a/.env.test +++ /dev/null @@ -1,18 +0,0 @@ -# Test Environment Configuration -# Auto-generated from Docker Compose stack -# DO NOT commit this file - regenerate with: bun run test:env - -# Supabase API URL (Kong gateway - provides /rest/v1/ routing to PostgREST) -SUPABASE_URL=http://localhost:55066 - -# Supabase Keys (matching docker-compose.test.yml JWT secret) -SUPABASE_SERVICE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU -SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0 - -# Direct PostgreSQL connection for migrations (bypasses PostgREST) -DB_HOST=localhost -DB_PORT=55063 -DB_NAME=postgres -DB_USER=postgres -DB_PASSWORD=postgres -DATABASE_URL=postgresql://postgres:postgres@localhost:55063/postgres diff --git a/README.md b/README.md index 007e8ae5..18eca476 100644 --- a/README.md +++ b/README.md @@ -1,368 +1,276 @@ -# KotaDB +# KotaDB - Self-Hosted Code Intelligence Engine -KotaDB is the indexing and query layer for CLI Agents like Claude Code and Codex. This project exposes a -lightweight HTTP interface for triggering repository indexing jobs and performing code search backed by -Supabase (PostgreSQL). Development is done autonomously through AI developer workflows via the `automation/adws/` automation scripts. +**Lightweight MCP server for code indexing and search, powered by Bun + PostgreSQL** -## Getting Started +KotaDB is a production-ready code intelligence platform designed for AI developer workflows. It provides fast, semantic code search with dependency graph analysis through a standards-based MCP interface and REST API. Self-host KotaDB to power your own AI coding tools, or use it as a learning resource for building production LLM infrastructure. + +## Features + +- **Code Indexing**: Automated repository cloning and file extraction with batch processing +- **Semantic Search**: Fast full-text search with context snippets and project filtering +- **Dependency Analysis**: Impact analysis, test scope discovery, circular dependency detection +- **MCP Protocol**: Standards-based interface for Claude Code and other MCP clients +- **Multi-Tenant**: Row-level security with PostgreSQL RLS for user isolation +- **Rate Limiting**: Tier-based request limits with sliding window enforcement +- **Job Queue**: Asynchronous indexing with pg-boss for reliable background processing +- **AI Developer Workflows**: Autonomous development automation via Python agents + +## Quick Start ### Prerequisites - [Bun](https://bun.sh) v1.1+ -- [Supabase](https://supabase.com) account with project created (see `docs/supabase-setup.md`) +- [Docker Desktop](https://www.docker.com/products/docker-desktop) (for local Supabase) +- [Supabase CLI](https://supabase.com/docs/guides/cli) (optional, for local development) -### Install dependencies +### Installation ```bash -cd app && bun install -``` +# Clone the repository +git clone https://github.com/jayminwest/kotadb.git +cd kotadb -### Configure Supabase - -1. Create a Supabase project at https://supabase.com/dashboard -2. Copy `.env.sample` to `.env` and add your Supabase credentials: - - `SUPABASE_URL` - Your project URL - - `SUPABASE_SERVICE_KEY` - Service role key (keep secret) - - `SUPABASE_ANON_KEY` - Anonymous/public key -3. 
Run database migrations (see `docs/supabase-setup.md` for details) +# Install dependencies +cd app && bun install -For detailed setup instructions, see `docs/supabase-setup.md`. +# Configure environment +cp .env.sample .env +# Edit .env with your Supabase credentials (see Self-Hosting guide below) -### Start the API server +# Run database migrations +cd app && bunx supabase db push -```bash +# Start the server cd app && bun run src/index.ts ``` -The server listens on port `3000` by default. Override with `PORT=4000 cd app && bun run src/index.ts`. +The server listens on port `3000` by default. Override with `PORT=4000`. -### Useful scripts +## Self-Hosting Guide -- `cd app && bun --watch src/index.ts` – Start the server in watch mode for local development. -- `cd app && bun test` – Run the Bun test suite. -- `cd app && bunx tsc --noEmit` – Type-check the project. +KotaDB is designed to be self-hosted with minimal configuration. Follow these steps: -## Web Application +### 1. Supabase Setup -KotaDB includes a Next.js web interface for code search and repository indexing. - -### Start the web app +**Option A: Supabase Local (Development)** ```bash -# Install dependencies (from repository root) -bun install +# Start Supabase Local with Docker +cd app && bunx supabase start -# Start development server -cd web && bun run dev +# The CLI will output your local credentials: +# - API URL: http://localhost:54321 +# - Service Role Key: eyJhbG... ``` -The web app will be available at `http://localhost:3001`. - -**Features:** -- Code search with context snippets -- Repository indexing interface -- Rate limit quota tracking -- Type-safe API integration with shared types +**Option B: Supabase Cloud (Production)** -See `web/README.md` for detailed documentation. +1. Create a project at [supabase.com/dashboard](https://supabase.com/dashboard) +2. Go to Project Settings → API to get your credentials: + - `SUPABASE_URL`: Your project URL + - `SUPABASE_SERVICE_KEY`: Service role key (keep secret) + - `SUPABASE_ANON_KEY`: Anonymous/public key -### Running Tests +### 2. Environment Configuration -KotaDB uses real PostgreSQL database connections for testing (no mocks). The test environment uses **Docker Compose** with isolated services to ensure exact parity between local and CI testing environments, with full project isolation to prevent port conflicts. +Copy `.env.sample` to `.env` and configure: -**Prerequisites:** Install [Docker Desktop](https://www.docker.com/products/docker-desktop) ```bash -# Verify Docker is installed and running -docker --version +# Required: Supabase credentials +SUPABASE_URL=https://your-project-id.supabase.co +SUPABASE_ANON_KEY=your-anon-key-here +SUPABASE_SERVICE_KEY=your-service-role-key-here +SUPABASE_DB_URL=postgresql://postgres:[PASSWORD]@db.[PROJECT_REF].supabase.co:5432/postgres + +# Optional: Billing features (disabled by default) +ENABLE_BILLING=false + +# Optional: GitHub integration (for webhook auto-indexing) +GITHUB_WEBHOOK_SECRET=your-webhook-secret-here ``` -**Quick Start:** +### 3. Database Migrations + +Apply migrations to set up tables, RLS policies, and indexes: + ```bash -# First-time setup: Start Docker Compose services and auto-generate .env.test -cd app && bun run test:setup +cd app && bunx supabase db push +``` -# Run tests -cd app && bun test +Migrations are located in `app/supabase/migrations/`. -# Reset database if needed -cd app && bun run test:reset +### 4. 
Start the Server -# Stop services when done -cd app && bun run test:teardown +```bash +cd app && bun run src/index.ts ``` -**Note:** The `.env.test` file is auto-generated from Docker Compose container ports and should not be committed to git. +Verify the server is running: -**Project Isolation:** Each test run uses a unique Docker Compose project name (e.g., `kotadb-test-1234567890-98765`), enabling multiple projects or branches to run tests simultaneously without port conflicts. +```bash +curl http://localhost:3000/health +``` -**CI Testing:** GitHub Actions CI uses the same Docker Compose environment with unique project names, ensuring tests run against identical infrastructure locally and in CI (PostgreSQL + PostgREST + Kong + Auth). See `.github/workflows/app-ci.yml` for details. +## Billing Features -For detailed testing setup and troubleshooting, see [`docs/testing-setup.md`](docs/testing-setup.md). +**Note:** Billing features are **disabled by default** in self-hosted deployments. Set `ENABLE_BILLING=true` in your environment to enable Stripe subscription billing. -## API Highlights +When billing is disabled: +- All billing endpoints return `501 Not Implemented` +- Rate limits default to free tier (100 requests/hour) +- Subscription management is unavailable -### REST Endpoints +To enable billing, configure Stripe credentials in your `.env`: -- `GET /health` – Simple heartbeat endpoint. -- `POST /index` – Queue a repository for indexing (body: `{ "repository": "org/repo", "localPath": "./repo" }`). -- `GET /search?term=foo` – Search for files containing `foo`. Optional `project` and `limit` parameters. -- `GET /files/recent` – Recent indexing results. +```bash +ENABLE_BILLING=true +STRIPE_SECRET_KEY=sk_test_... +STRIPE_WEBHOOK_SECRET=whsec_... +STRIPE_SOLO_PRICE_ID=price_... +STRIPE_TEAM_PRICE_ID=price_... +``` -The indexer clones repositories automatically when a `localPath` is not provided. Override the default GitHub clone source by exporting `KOTA_GIT_BASE_URL` (for example, your self-hosted Git service). +See `app/.env.sample` for complete Stripe configuration documentation. -### Rate Limiting +## MCP Integration -All authenticated endpoints enforce tier-based rate limiting to prevent API abuse: +KotaDB implements the [Model Context Protocol](https://modelcontextprotocol.io) for seamless integration with AI coding tools like Claude Code. -**Tier Limits** (requests per hour): -- **Free**: 100 requests/hour -- **Solo**: 1,000 requests/hour -- **Team**: 10,000 requests/hour +### Using KotaDB with Claude Code -**Response Headers** (included in all authenticated responses): -``` -X-RateLimit-Limit: 100 -X-RateLimit-Remaining: 95 -X-RateLimit-Reset: 1728475200 -``` +Add KotaDB as an MCP server in your Claude Code configuration: -**Rate Limit Exceeded** (429 response): ```json { - "error": "Rate limit exceeded", - "retryAfter": 3456 + "mcpServers": { + "kotadb": { + "command": "bunx", + "args": ["@modelcontextprotocol/server-http", "http://localhost:3000/mcp"], + "env": { + "KOTADB_API_KEY": "your-api-key-here" + } + } + } } ``` -Response includes headers: -- `X-RateLimit-Limit` – Total requests allowed per hour for your tier -- `X-RateLimit-Remaining` – Requests remaining in current window -- `X-RateLimit-Reset` – Unix timestamp when the limit resets -- `Retry-After` – Seconds until you can retry (429 responses only) - -Rate limits reset at the top of each hour. The `/health` endpoint is exempt from rate limiting. 
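A well-behaved client can use these headers to back off before hitting the limit. Below is a minimal TypeScript sketch; the helper name and single-retry policy are illustrative, not part of the API:

```typescript
// Hypothetical helper: issue an authenticated request and honor the
// documented rate limit headers. Retries once after a 429, waiting for
// the server-provided Retry-After interval.
async function fetchWithRateLimit(url: string, apiKey: string): Promise<Response> {
  const headers = { Authorization: `Bearer ${apiKey}` };
  const response = await fetch(url, { headers });

  const remaining = response.headers.get("X-RateLimit-Remaining");
  if (remaining !== null && Number(remaining) === 0) {
    console.warn("Rate limit budget exhausted; the next request will 429");
  }

  if (response.status === 429) {
    const retryAfterSeconds = Number(response.headers.get("Retry-After") ?? "60");
    await new Promise((resolve) => setTimeout(resolve, retryAfterSeconds * 1000));
    return fetch(url, { headers });
  }

  return response;
}
```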
- -### MCP Protocol Endpoint - -KotaDB supports the [Model Context Protocol (MCP)](https://modelcontextprotocol.io) for standardized agent integration. The MCP endpoint enables CLI agents like Claude Code to discover and use KotaDB's capabilities automatically. - -**Endpoint:** `POST /mcp` - -**Required Headers:** -- `Authorization`: Bearer token with valid API key -- `Accept: application/json, text/event-stream` **(CRITICAL: Both types required)** -- `MCP-Protocol-Version: 2025-06-18` -- `Content-Type: application/json` +### Available MCP Tools -> **Note**: The Accept header MUST include both `application/json` and `text/event-stream`. Missing either will result in HTTP 406 "Not Acceptable". See [Migration Guide](docs/migration/v0.1.0-to-v0.1.1.md) for details. +- `search_code`: Search indexed files for a specific term +- `index_repository`: Index a git repository by cloning/updating it +- `list_recent_files`: List recently indexed files, ordered by timestamp +- `search_dependencies`: Find files that depend on or are depended on by a target file -**Example: Initialize Handshake** +See `docs/guides/mcp-claude-code-integration.md` for detailed integration instructions. -```bash -curl -X POST http://localhost:3000/mcp \ - -H "Content-Type: application/json" \ - -H "Accept: application/json, text/event-stream" \ - -H "MCP-Protocol-Version: 2025-06-18" \ - -H "Authorization: Bearer YOUR_API_KEY" \ - -d '{ - "jsonrpc": "2.0", - "id": 1, - "method": "initialize", - "params": { - "protocolVersion": "2025-06-18", - "capabilities": {}, - "clientInfo": {"name": "my-client", "version": "1.0"} - } - }' -``` +## Testing -**Example: List Available Tools** +KotaDB follows an **antimocking philosophy** - all tests use real Supabase Local database connections for production parity. No mocks, no stubs. ```bash -curl -X POST http://localhost:3000/mcp \ - -H "Content-Type: application/json" \ - -H "Accept: application/json, text/event-stream" \ - -H "MCP-Protocol-Version: 2025-06-18" \ - -H "Authorization: Bearer YOUR_API_KEY" \ - -d '{ - "jsonrpc": "2.0", - "id": 2, - "method": "tools/list", - "params": {} - }' -``` +# First-time setup: Start Docker Compose services +cd app && bun run test:setup -**Example: Search Code** +# Run tests +cd app && bun test -```bash -curl -X POST http://localhost:3000/mcp \ - -H "Content-Type: application/json" \ - -H "Accept: application/json, text/event-stream" \ - -H "MCP-Protocol-Version: 2025-06-18" \ - -H "Authorization: Bearer YOUR_API_KEY" \ - -d '{ - "jsonrpc": "2.0", - "id": 3, - "method": "tools/call", - "params": { - "name": "search_code", - "arguments": {"term": "Router"} - } - }' +# Stop services when done +cd app && bun run test:teardown ``` -**Available MCP Tools:** -- `search_code`: Search indexed code files for a specific term -- `index_repository`: Index a git repository by cloning/updating it -- `list_recent_files`: List recently indexed files -- `search_dependencies`: Search the dependency graph for impact analysis +See `docs/testing-setup.md` for detailed testing documentation. -**Tool: `search_dependencies`** +## API Endpoints -Find files that depend on (dependents) or are depended on by (dependencies) a target file. 
Useful for:
- **Impact analysis before refactoring**: See what breaks if you change a file
- **Test scope discovery**: Find relevant test files for implementation changes
- **Circular dependency detection**: Identify dependency cycles in your codebase

**Parameters:**
- `file_path` (required): Relative file path within repository (e.g., `"src/auth/context.ts"`)
- `direction` (optional): Search direction - `"dependents"`, `"dependencies"`, or `"both"` (default: `"both"`)
- `depth` (optional): Recursion depth for traversal, 1-5 (default: `1`). Higher values find indirect relationships.
- `include_tests` (optional): Include test files in results (default: `true`)
- `repository` (optional): Repository ID to search within (auto-detected if omitted)

**Example: Find what breaks if you change a file**

```bash
curl -X POST http://localhost:3000/mcp \
  -H "Content-Type: application/json" \
  -H "Accept: application/json, text/event-stream" \
  -H "MCP-Protocol-Version: 2025-06-18" \
  -H "Authorization: Bearer YOUR_API_KEY" \
  -d '{
    "jsonrpc": "2.0",
    "id": 4,
    "method": "tools/call",
    "params": {
      "name": "search_dependencies",
      "arguments": {
        "file_path": "src/auth/context.ts",
        "direction": "dependents",
        "depth": 2
      }
    }
  }'
```

**Response Format:**

```json
{
  "jsonrpc": "2.0",
  "id": 4,
  "result": {
    "content": [{
      "type": "text",
      "text": "{
  \"file_path\": \"src/auth/context.ts\",
  \"direction\": \"dependents\",
  \"depth\": 2,
  \"dependents\": {
    \"direct\": [\"src/auth/middleware.ts\", \"src/api/routes.ts\"],
    \"indirect\": {
      \"src/auth/middleware.ts\": [\"src/index.ts\"]
    },
    \"cycles\": [],
    \"count\": 3
  }
}"
    }]
  }
}
```

**Security & Configuration:**

By default, KotaDB only accepts requests from localhost origins. For production deployments:
- Set `KOTA_ALLOWED_ORIGINS` environment variable (comma-separated list of allowed origins)
- Use a reverse proxy with authentication (e.g., nginx with basic auth)
- Bind to localhost only and use network policies to control access

**Session Management:**

The optional `Mcp-Session-Id` header is validated but not currently used for state management. Future versions may support persistent sessions with server-side storage.
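For clients that prefer code over curl, the same `tools/call` request can be issued with `fetch`. This sketch assumes the server replies with a plain JSON body (rather than an SSE stream) and that `KOTADB_API_KEY` is set; the parsing mirrors the escaped-JSON `text` payload shown above:

```typescript
// Sketch: the search_dependencies call above, issued from TypeScript
// (Bun or Node 18+). All four headers are required by the endpoint.
const response = await fetch("http://localhost:3000/mcp", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Accept: "application/json, text/event-stream", // both types required
    "MCP-Protocol-Version": "2025-06-18",
    Authorization: `Bearer ${process.env.KOTADB_API_KEY}`,
  },
  body: JSON.stringify({
    jsonrpc: "2.0",
    id: 4,
    method: "tools/call",
    params: {
      name: "search_dependencies",
      arguments: { file_path: "src/auth/context.ts", direction: "dependents", depth: 2 },
    },
  }),
});

const rpc = await response.json();
// The tool result arrives as an escaped JSON string inside content[0].text.
const graph = JSON.parse(rpc.result.content[0].text);
console.log(graph.dependents.direct);
```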
+``` +app/ # Bun + TypeScript API service + src/ + api/ # HTTP routes and database queries + auth/ # Authentication and rate limiting + db/ # Supabase client and migrations + indexer/ # Repository crawling and code extraction + mcp/ # Model Context Protocol implementation + queue/ # pg-boss job queue for async indexing + tests/ # Integration tests (133 tests) + supabase/ # Database migrations and configuration -## Docker & Compose +automation/ # Python AI developer workflows (ADW) + adws/ # Autonomous development agents -Build and run the service in a container: +shared/ # Shared TypeScript types (monorepo) + types/ # API contracts, entities, auth types -```bash -docker compose up dev +.claude/commands/ # Claude Code slash commands and guides ``` -The `dev` and `home` services use the build context from the `app/` directory. A production-flavoured service is available via the `home` target in `docker-compose.yml`. - -## Deployment +## Documentation -For deploying KotaDB to Fly.io (staging or production), see the comprehensive guide at [`docs/deployment.md`](docs/deployment.md). The deployment guide covers: -- Prerequisites and Fly.io authentication -- Staging and production environment setup -- Supabase configuration and secret management -- Health check validation and MCP integration testing -- Troubleshooting common deployment issues +- **Development**: `.claude/commands/app/dev-commands.md` - Quick start and testing +- **Architecture**: `.claude/commands/docs/architecture.md` - Path aliases, shared types +- **Database**: `.claude/commands/docs/database.md` - Schema, RLS policies, migrations +- **MCP Integration**: `docs/guides/mcp-claude-code-integration.md` - Claude Code setup +- **Testing**: `docs/testing-setup.md` - Antimocking philosophy and test infrastructure +- **AI Workflows**: `automation/adws/README.md` - Autonomous development automation -## Project Layout - -``` -app/ # Application layer (TypeScript/Bun API service) - src/ - api/ # HTTP routes and database access - auth/ # Authentication middleware and API key validation - db/ # Supabase client initialization and helpers - indexer/ # Repository crawling, parsing, and extraction utilities - mcp/ # Model Context Protocol (MCP) implementation - types/ # Shared TypeScript types - tests/ # Test suite (133 tests) - package.json # Bun dependencies and scripts - tsconfig.json # TypeScript configuration - Dockerfile # Bun runtime image - supabase/ # Database migrations and configuration - scripts/ # Application-specific bash scripts +## Contributing -web/ # Next.js web application (frontend) - src/ - components/ # React components - pages/ # Next.js pages and API routes - lib/ # Client utilities and API integration - package.json # Frontend dependencies - next.config.js # Next.js configuration +Contributions are welcome! This repository is maintained as an open source core fork, with changes synced from the private development repository. 
-shared/ # Shared TypeScript types (monorepo) - types/ # API contracts, entities, authentication types +See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution guidelines, including: +- Development setup and testing requirements +- Git flow and branch strategy +- Code style and commit message conventions +- Antimocking testing philosophy -automation/ # Agentic layer (Python AI developer workflows) - adws/ # ADW automation scripts and modules - docker/ # ADW-specific Docker images +## Consulting & Support -.claude/commands/ # Claude Code slash commands (see .claude/commands/README.md for organization details) -.github/workflows/ # CI workflows (app-ci.yml for application tests) -docs/ # Documentation (schema, specs, setup guides) -``` +**Need help integrating KotaDB into your AI workflow?** -See `app/README.md` for application-specific quickstart, `web/README.md` for frontend development, and `automation/adws/README.md` for automation workflows. +I provide consulting services for: +- Custom MCP server development +- LLM-powered developer tooling +- Code intelligence infrastructure +- AI agent automation pipelines -## Project Roadmap +**Contact:** Jaymin West +- GitHub: [@jayminwest](https://github.com/jayminwest) +- Email: jaymin@jayminwest.com -For strategic priorities, planned features, and development timeline, see [ROADMAP.md](ROADMAP.md). +**Looking for a hosted solution?** The full-stack web application with authentication, billing, and dashboard is available at [kotadb.io](https://kotadb.io) (private repository). -The roadmap provides: -- Current state and shipped features -- Immediate priorities (Phase 1) -- Medium-term and long-term goals -- Dependencies and blockers -- Key architectural decisions +## License -## Next Steps +MIT License - see [LICENSE](LICENSE) for details. -- Harden repository checkout logic with retry/backoff and temporary workspace isolation. -- Expand `automation/adws/` with runnable automation pipelines. -- Add richer schema migrations for symbols, AST metadata, and search primitives. +Copyright (c) 2024 Jaymin West diff --git a/README.public.md b/README.public.md deleted file mode 100644 index 18eca476..00000000 --- a/README.public.md +++ /dev/null @@ -1,276 +0,0 @@ -# KotaDB - Self-Hosted Code Intelligence Engine - -**Lightweight MCP server for code indexing and search, powered by Bun + PostgreSQL** - -KotaDB is a production-ready code intelligence platform designed for AI developer workflows. It provides fast, semantic code search with dependency graph analysis through a standards-based MCP interface and REST API. Self-host KotaDB to power your own AI coding tools, or use it as a learning resource for building production LLM infrastructure. 
- -## Features - -- **Code Indexing**: Automated repository cloning and file extraction with batch processing -- **Semantic Search**: Fast full-text search with context snippets and project filtering -- **Dependency Analysis**: Impact analysis, test scope discovery, circular dependency detection -- **MCP Protocol**: Standards-based interface for Claude Code and other MCP clients -- **Multi-Tenant**: Row-level security with PostgreSQL RLS for user isolation -- **Rate Limiting**: Tier-based request limits with sliding window enforcement -- **Job Queue**: Asynchronous indexing with pg-boss for reliable background processing -- **AI Developer Workflows**: Autonomous development automation via Python agents - -## Quick Start - -### Prerequisites - -- [Bun](https://bun.sh) v1.1+ -- [Docker Desktop](https://www.docker.com/products/docker-desktop) (for local Supabase) -- [Supabase CLI](https://supabase.com/docs/guides/cli) (optional, for local development) - -### Installation - -```bash -# Clone the repository -git clone https://github.com/jayminwest/kotadb.git -cd kotadb - -# Install dependencies -cd app && bun install - -# Configure environment -cp .env.sample .env -# Edit .env with your Supabase credentials (see Self-Hosting guide below) - -# Run database migrations -cd app && bunx supabase db push - -# Start the server -cd app && bun run src/index.ts -``` - -The server listens on port `3000` by default. Override with `PORT=4000`. - -## Self-Hosting Guide - -KotaDB is designed to be self-hosted with minimal configuration. Follow these steps: - -### 1. Supabase Setup - -**Option A: Supabase Local (Development)** - -```bash -# Start Supabase Local with Docker -cd app && bunx supabase start - -# The CLI will output your local credentials: -# - API URL: http://localhost:54321 -# - Service Role Key: eyJhbG... -``` - -**Option B: Supabase Cloud (Production)** - -1. Create a project at [supabase.com/dashboard](https://supabase.com/dashboard) -2. Go to Project Settings → API to get your credentials: - - `SUPABASE_URL`: Your project URL - - `SUPABASE_SERVICE_KEY`: Service role key (keep secret) - - `SUPABASE_ANON_KEY`: Anonymous/public key - -### 2. Environment Configuration - -Copy `.env.sample` to `.env` and configure: - -```bash -# Required: Supabase credentials -SUPABASE_URL=https://your-project-id.supabase.co -SUPABASE_ANON_KEY=your-anon-key-here -SUPABASE_SERVICE_KEY=your-service-role-key-here -SUPABASE_DB_URL=postgresql://postgres:[PASSWORD]@db.[PROJECT_REF].supabase.co:5432/postgres - -# Optional: Billing features (disabled by default) -ENABLE_BILLING=false - -# Optional: GitHub integration (for webhook auto-indexing) -GITHUB_WEBHOOK_SECRET=your-webhook-secret-here -``` - -### 3. Database Migrations - -Apply migrations to set up tables, RLS policies, and indexes: - -```bash -cd app && bunx supabase db push -``` - -Migrations are located in `app/supabase/migrations/`. - -### 4. Start the Server - -```bash -cd app && bun run src/index.ts -``` - -Verify the server is running: - -```bash -curl http://localhost:3000/health -``` - -## Billing Features - -**Note:** Billing features are **disabled by default** in self-hosted deployments. Set `ENABLE_BILLING=true` in your environment to enable Stripe subscription billing. 
- -When billing is disabled: -- All billing endpoints return `501 Not Implemented` -- Rate limits default to free tier (100 requests/hour) -- Subscription management is unavailable - -To enable billing, configure Stripe credentials in your `.env`: - -```bash -ENABLE_BILLING=true -STRIPE_SECRET_KEY=sk_test_... -STRIPE_WEBHOOK_SECRET=whsec_... -STRIPE_SOLO_PRICE_ID=price_... -STRIPE_TEAM_PRICE_ID=price_... -``` - -See `app/.env.sample` for complete Stripe configuration documentation. - -## MCP Integration - -KotaDB implements the [Model Context Protocol](https://modelcontextprotocol.io) for seamless integration with AI coding tools like Claude Code. - -### Using KotaDB with Claude Code - -Add KotaDB as an MCP server in your Claude Code configuration: - -```json -{ - "mcpServers": { - "kotadb": { - "command": "bunx", - "args": ["@modelcontextprotocol/server-http", "http://localhost:3000/mcp"], - "env": { - "KOTADB_API_KEY": "your-api-key-here" - } - } - } -} -``` - -### Available MCP Tools - -- `search_code`: Search indexed files for a specific term -- `index_repository`: Index a git repository by cloning/updating it -- `list_recent_files`: List recently indexed files, ordered by timestamp -- `search_dependencies`: Find files that depend on or are depended on by a target file - -See `docs/guides/mcp-claude-code-integration.md` for detailed integration instructions. - -## Testing - -KotaDB follows an **antimocking philosophy** - all tests use real Supabase Local database connections for production parity. No mocks, no stubs. - -```bash -# First-time setup: Start Docker Compose services -cd app && bun run test:setup - -# Run tests -cd app && bun test - -# Stop services when done -cd app && bun run test:teardown -``` - -See `docs/testing-setup.md` for detailed testing documentation. 
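As a concrete illustration of the antimocking approach, an integration test can simply exercise a running server over HTTP. A minimal sketch using Bun's built-in test runner, assuming the API is listening on its default port:

```typescript
// Integration-style test: hits the real HTTP server (and therefore the
// real database behind /health) instead of stubbing either layer.
import { describe, expect, it } from "bun:test";

describe("GET /health", () => {
  it("returns 200 when the database is reachable", async () => {
    const response = await fetch("http://localhost:3000/health");
    expect(response.status).toBe(200);
  });
});
```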
- -## API Endpoints - -### REST API - -- `GET /health` - Health check endpoint (includes queue metrics) -- `POST /index` - Queue a repository for indexing -- `GET /search?term=` - Search indexed files -- `GET /files/recent` - List recently indexed files -- `POST /mcp` - MCP protocol endpoint for tool integration -- `POST /api/keys/generate` - Generate API key for authenticated user -- `GET /api/keys/validate` - Validate API key or JWT token - -### Webhooks - -- `POST /webhooks/github` - GitHub push event webhook (requires `GITHUB_WEBHOOK_SECRET`) -- `POST /webhooks/stripe` - Stripe subscription webhook (only if `ENABLE_BILLING=true`) - -### Rate Limits - -- **Free**: 100 requests/hour -- **Solo**: 1,000 requests/hour (requires billing enabled) -- **Team**: 10,000 requests/hour (requires billing enabled) - -All authenticated endpoints include rate limit headers: - -``` -X-RateLimit-Limit: 100 -X-RateLimit-Remaining: 95 -X-RateLimit-Reset: 1728475200 -``` - -## Project Structure - -``` -app/ # Bun + TypeScript API service - src/ - api/ # HTTP routes and database queries - auth/ # Authentication and rate limiting - db/ # Supabase client and migrations - indexer/ # Repository crawling and code extraction - mcp/ # Model Context Protocol implementation - queue/ # pg-boss job queue for async indexing - tests/ # Integration tests (133 tests) - supabase/ # Database migrations and configuration - -automation/ # Python AI developer workflows (ADW) - adws/ # Autonomous development agents - -shared/ # Shared TypeScript types (monorepo) - types/ # API contracts, entities, auth types - -.claude/commands/ # Claude Code slash commands and guides -``` - -## Documentation - -- **Development**: `.claude/commands/app/dev-commands.md` - Quick start and testing -- **Architecture**: `.claude/commands/docs/architecture.md` - Path aliases, shared types -- **Database**: `.claude/commands/docs/database.md` - Schema, RLS policies, migrations -- **MCP Integration**: `docs/guides/mcp-claude-code-integration.md` - Claude Code setup -- **Testing**: `docs/testing-setup.md` - Antimocking philosophy and test infrastructure -- **AI Workflows**: `automation/adws/README.md` - Autonomous development automation - -## Contributing - -Contributions are welcome! This repository is maintained as an open source core fork, with changes synced from the private development repository. - -See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution guidelines, including: -- Development setup and testing requirements -- Git flow and branch strategy -- Code style and commit message conventions -- Antimocking testing philosophy - -## Consulting & Support - -**Need help integrating KotaDB into your AI workflow?** - -I provide consulting services for: -- Custom MCP server development -- LLM-powered developer tooling -- Code intelligence infrastructure -- AI agent automation pipelines - -**Contact:** Jaymin West -- GitHub: [@jayminwest](https://github.com/jayminwest) -- Email: jaymin@jayminwest.com - -**Looking for a hosted solution?** The full-stack web application with authentication, billing, and dashboard is available at [kotadb.io](https://kotadb.io) (private repository). - -## License - -MIT License - see [LICENSE](LICENSE) for details. 
- -Copyright (c) 2024 Jaymin West diff --git a/app/package.json b/app/package.json index 98cd968c..b5091166 100644 --- a/app/package.json +++ b/app/package.json @@ -47,7 +47,7 @@ "@types/supertest": "^6.0.3", "bun-types": "^1.1.10", "husky": "^9.1.7", - "lint-staged": "^16.2.4", + "lint-staged": "^16.2.7", "supertest": "^7.1.4", "typescript": "^5.9.3" } diff --git a/docs/vision/2025-10-13-multi-agent-framework-investigation.md b/docs/vision/2025-10-13-multi-agent-framework-investigation.md deleted file mode 100644 index 88deca6e..00000000 --- a/docs/vision/2025-10-13-multi-agent-framework-investigation.md +++ /dev/null @@ -1,607 +0,0 @@ -# Multi-Agent Collaboration Framework Investigation - -**Date**: October 13, 2025 -**Author**: Claude Code (via /workflows:prime investigation) -**Status**: Strategic Vision Document - ---- - -## Executive Summary - -This document outlines the strategic opportunity to position KotaDB as a **multi-agent collaboration framework** rather than a standalone code search tool. Following a comprehensive investigation of the codebase, automation infrastructure, and MCP ecosystem, we've identified that KotaDB has already built the foundational components for a production-grade multi-agent development platform. - -**Key Finding**: The code search and indexing capabilities should be reframed as the **memory layer** for autonomous agent workflows, not the primary product. The real innovation is the orchestration infrastructure that enables multiple AI agents to collaborate on complex software development tasks. - ---- - -## Current State Analysis - -### What We Have Today - -#### 1. Production MCP Server (app/src/mcp/) -- HTTP endpoint with Express.js + `@modelcontextprotocol/sdk` (v1.20+) -- Three tools: `search_code`, `index_repository`, `list_recent_files` -- Tier-based authentication (free/solo/team) with rate limiting -- Row-level security via Supabase for multi-tenant isolation -- 122/132 tests passing (92.4% coverage) - -#### 2. AI Developer Workflow System (automation/adws/) -- **Python orchestration layer** invoking Claude Code CLI agents -- **Five SDLC phases**: plan → build → test → review → document -- **65+ GitHub issues** processed autonomously in production -- **Worktree isolation** for concurrent agent execution -- **Multi-trigger system**: GitHub issues, webhooks, home server queue -- **State persistence** via JSON snapshots (`agents//adw_state.json`) - -#### 3. Infrastructure Components -- **Git worktree management** (`adw_modules/git_ops.py`) -- **Slash command system** (30+ templates in `.claude/commands/`) -- **Agent catalog**: classifier, planner, implementor, reviewer, documenter, patcher -- **Validation suite**: Bun lint/typecheck/test/build with lockfile detection -- **Home server integration**: Tailscale-connected task queue for distributed agents - -### What This Actually Means - -**We've built a multi-agent framework disguised as a code search tool.** - -The code indexing isn't the product—it's the **memory layer** that enables agents to understand codebases. The real product is the orchestration infrastructure that coordinates multiple specialized agents to autonomously complete complex software development workflows. 
- ---- - -## Market Opportunity - -### Current Landscape (October 2025) - -**MCP Adoption Accelerating**: -- OpenAI integrated MCP (March 2025) -- Anthropic launched MCP (November 2024) -- 2,000+ MCP servers discovered in the wild -- SDKs available: TypeScript, Python, C#, Java - -**Critical Gap Identified**: -- **Security**: Research shows ~2,000 MCP servers lack authentication -- **Fragmentation**: Every team building custom agent orchestration -- **No Standards**: Ad-hoc protocols for agent-to-agent communication -- **No Platform**: Infrastructure for multi-agent collaboration doesn't exist - -**KotaDB's Unique Position**: -- ✅ First-mover on authenticated MCP infrastructure -- ✅ Proven patterns from 65+ autonomous issues in production -- ✅ Anti-mocking philosophy = reliable execution -- ✅ Row-level security = native multi-tenancy - -### Competitive Analysis - -#### vs. Existing MCP Servers -| Aspect | Others | KotaDB | -|--------|--------|--------| -| **Security** | No auth (2k+ servers) | API keys + rate limits + RLS | -| **Scale** | Single-agent tools | Multi-agent orchestration | -| **Production** | Prototypes/demos | 65+ issues automated | -| **Scope** | Standalone tools | Full SDLC pipeline | - -#### vs. Agent Frameworks (LangChain, AutoGPT) -| Aspect | General Frameworks | KotaDB | -|--------|-------------------|--------| -| **Focus** | General AI agents | **Software development agents** | -| **Isolation** | In-memory state | Git worktrees + persistent state | -| **Collaboration** | Custom protocols | **MCP standard** | -| **Infrastructure** | BYO | Batteries included | - -#### vs. CI/CD Platforms (GitHub Actions, CircleCI) -| Aspect | CI/CD | KotaDB | -|--------|-------|--------| -| **Execution** | YAML configs | **Autonomous agent decisions** | -| **Iteration** | Manual fixes | **Agent self-correction loops** | -| **Context** | Limited (env vars) | **Full codebase understanding** | -| **Collaboration** | Sequential steps | **Concurrent multi-agent** | - ---- - -## Technical Architecture: KotaDB as Framework - -### Conceptual Model - -``` -┌─────────────────────────────────────────────────────────────┐ -│ KotaDB Platform │ -├─────────────────────────────────────────────────────────────┤ -│ │ -│ ┌──────────────────────────────────────────────────────┐ │ -│ │ Agent Communication Layer (MCP) │ │ -│ │ • Tool registry + discovery │ │ -│ │ • Authentication + rate limiting │ │ -│ │ • Request routing + load balancing │ │ -│ └──────────────────────────────────────────────────────┘ │ -│ │ -│ ┌──────────────────────────────────────────────────────┐ │ -│ │ Workflow Orchestration Engine │ │ -│ │ • Phase execution (plan/build/test/review/docs) │ │ -│ │ • State management + persistence │ │ -│ │ • Error handling + retry logic │ │ -│ └──────────────────────────────────────────────────────┘ │ -│ │ -│ ┌──────────────────────────────────────────────────────┐ │ -│ │ Resource Management Layer │ │ -│ │ • Git worktree isolation │ │ -│ │ • Concurrent execution coordination │ │ -│ │ • Cleanup + garbage collection │ │ -│ └──────────────────────────────────────────────────────┘ │ -│ │ -│ ┌──────────────────────────────────────────────────────┐ │ -│ │ Knowledge & Context Layer │ │ -│ │ • Code indexing + search │ │ -│ │ • Dependency graph analysis │ │ -│ │ • Semantic code understanding │ │ -│ └──────────────────────────────────────────────────────┘ │ -│ │ -│ ┌──────────────────────────────────────────────────────┐ │ -│ │ Integration & Trigger Layer │ │ -│ │ • GitHub webhooks │ │ -│ │ • Home server queue │ │ 
-│ │ • CLI + API interfaces │ │ -│ └──────────────────────────────────────────────────────┘ │ -│ │ -└─────────────────────────────────────────────────────────────┘ -``` - -### Proposed MCP Server Architecture - -#### ADW Orchestration Server (automation/adws/mcp_server/) - -**Core Tools**: - -```typescript -// Workflow orchestration -{ - name: "adw_run_phase", - description: "Execute ADW workflow phase (plan/build/test/review/document)", - arguments: { - phase: "plan" | "build" | "test" | "review" | "document", - issue_number: string, - adw_id?: string - } -} - -{ - name: "adw_get_state", - description: "Query current workflow state", - arguments: { adw_id: string } -} - -{ - name: "adw_list_workflows", - description: "List available workflows and their status", - arguments: { adw_id?: string } -} - -// Git operations -{ - name: "git_create_worktree", - description: "Create isolated git worktree", - arguments: { - worktree_name: string, - base_branch: string, - base_path?: string - } -} - -{ - name: "git_cleanup_worktree", - description: "Remove worktree and optionally delete branch", - arguments: { - worktree_name: string, - delete_branch?: boolean - } -} - -// Home server integration -{ - name: "homeserver_get_tasks", - description: "Fetch pending tasks from home server", - arguments: { status?: "pending" | "claimed" | "in_progress" } -} - -{ - name: "homeserver_update_task", - description: "Update task status and metadata", - arguments: { - task_id: string, - status: string, - metadata?: object - } -} - -// Validation -{ - name: "bun_validate", - description: "Run validation suite (lint/typecheck/test/build)", - arguments: { cwd?: string } -} - -// Slash commands -{ - name: "adw_execute_command", - description: "Execute slash command with arguments", - arguments: { - command: string, - args: string[], - adw_id?: string - } -} -``` - -### Agent Lifecycle Example - -```python -# 1. Agent Registration -agent = kotadb.register_agent( - name="custom-security-scanner", - tools=["scan_vulnerabilities", "suggest_fixes"], - capabilities={"languages": ["typescript", "python", "rust"]}, - auth_tier="team" -) - -# 2. Tool Publication -@agent.tool("scan_vulnerabilities") -async def scan_vulnerabilities(codebase_path: str): - # Use KotaDB's search to find security patterns - auth_code = await kotadb.search_code( - term="password OR secret OR api_key", - repository=codebase_path - ) - - vulnerabilities = [] - for result in auth_code.results: - issues = await detect_hardcoded_secrets(result.content) - vulnerabilities.extend(issues) - - return {"vulnerabilities": vulnerabilities, "severity": "high"} - -# 3. Workflow Integration -workflow = kotadb.create_workflow("secure-sdlc") -workflow.add_phase("plan", agent="agent-planner") -workflow.add_phase("implement", agent="agent-implementor") -workflow.add_phase("security", agent="custom-security-scanner") -workflow.add_phase("review", agent="agent-reviewer") - -# 4. Execution -result = await kotadb.run_workflow( - workflow="secure-sdlc", - trigger={"type": "github_issue", "issue_number": 123} -) -``` - ---- - -## Implementation Roadmap - -### Phase 1: Framework Core (2-3 weeks) - -**Deliverables**: -1. **ADW MCP Server** (`automation/adws/mcp_server/`) - - Workflow orchestration tools (run_phase, get_state, list_workflows) - - Git operations tools (create_worktree, cleanup_worktree) - - Slash command execution tools - -2. 
**Agent Registry** - - Catalog of available agents with capabilities - - Tool manifests (what each agent can do) - - Performance metrics (success rate, execution time) - -3. **Unified Configuration** - - `.mcp.json` with both servers (kotadb + kotadb-adw) - - Environment variable templates - - Authentication configuration - -**Success Metrics**: -- ✅ External agent can trigger ADW workflow via MCP -- ✅ State inspection via MCP tools (no filesystem access needed) -- ✅ Worktree creation/cleanup via MCP tools - -### Phase 2: Developer Experience (1-2 months) - -**Deliverables**: -1. **KotaDB CLI** - ```bash - kotadb init # Initialize workspace - kotadb agent create custom-reviewer # Register agent - kotadb workflow run sdlc --issue 123 # Execute workflow - kotadb inspect adw-abc123 # Query state - kotadb logs adw-abc123 # Stream logs - ``` - -2. **Agent Templates** - - Boilerplate for Python/TypeScript/Rust agents - - Pre-built integrations (Slack, Discord, Linear) - - Testing harness for agent validation - -3. **Collaboration Primitives** - ```typescript - // Agent-to-agent messaging - await kotadb.broadcast({ - from: "agent-planner-abc123", - to: "agent-implementor-*", - message: { type: "plan_ready", plan_file: "docs/specs/plan.md" } - }); - - // Resource locking - const worktree = await kotadb.lock_resource("worktree:feat-123"); - await agent.implement(worktree); - await kotadb.release_resource(worktree.id); - ``` - -**Success Metrics**: -- ✅ Developer can publish custom agent in < 5 minutes -- ✅ Custom agents compose with built-in agents seamlessly -- ✅ Multi-agent workflows execute without race conditions - -### Phase 3: Enterprise Platform (3-6 months) - -**Deliverables**: -1. **Self-Hosted KotaDB** - - Docker Compose stack (Supabase + MCP servers) - - Air-gapped deployment option - - SSO integration (Okta, Auth0) - -2. **Agent Marketplace** - - Public registry of vetted agents - - Usage analytics (downloads, success rate) - - Revenue sharing for agent authors - -3. **Compliance & Observability** - - Audit logs for all agent actions - - Cost tracking per agent/workflow - - SOC2/HIPAA/GDPR compliance tooling - -**Success Metrics**: -- ✅ 10+ enterprise customers running self-hosted -- ✅ 50+ agents published to marketplace -- ✅ SOC2 Type II certification - ---- - -## The Killer Feature: Cross-Vendor Agent Collaboration - -```typescript -// Example: Multi-vendor agent workflow -const workflow = kotadb.workflow("full-stack-feature"); - -// Anthropic Claude for planning -workflow.add_phase("plan", { - agent: "claude-sonnet-4", - tools: ["kotadb.search_code", "kotadb.index_repository"] -}); - -// OpenAI for implementation (when MCP support lands) -workflow.add_phase("implement", { - agent: "openai-o1", - tools: ["kotadb.git_create_worktree", "github.create_pr"] -}); - -// Custom security agent -workflow.add_phase("security", { - agent: "custom-security-scanner", - tools: ["snyk.scan", "kotadb.search_code"] -}); - -// Google Gemini for documentation -workflow.add_phase("document", { - agent: "gemini-pro", - tools: ["kotadb.git_commit", "notion.create_page"] -}); - -// KotaDB orchestrates via MCP -await workflow.run({ issue_number: 123 }); -``` - -**This is impossible with current tooling.** Different LLM providers don't talk to each other. Custom agents require brittle glue code. KotaDB makes it trivial by standardizing on MCP. - ---- - -## Go-To-Market Strategy - -### Target Personas - -#### 1. 
Agentic Engineering Early Adopters -- Using Claude Code, Cursor, Copilot daily -- Frustrated by single-agent limitations -- Want to scale from 1 agent to N agents -- **Pain Point**: "I have 5 specialized agents but no way to coordinate them" - -#### 2. Platform Engineering Teams -- Building internal developer platforms -- Need standardized agent infrastructure -- Seeking self-hosted, air-gapped solutions -- **Pain Point**: "Every team is building their own agent orchestration" - -#### 3. AI-Native Startups -- Entire codebase managed by agents -- Need production-grade orchestration -- High tolerance for bleeding-edge tech -- **Pain Point**: "We're spending more time managing agents than building features" - -### Pricing Model - -**Free Tier**: -- 100 agent tool calls/hour -- Public repositories only -- Community support -- Single-agent workflows - -**Solo ($29/month)**: -- 1,000 agent tool calls/hour -- Private repositories -- Email support -- Multi-agent workflows (up to 3 concurrent) - -**Team ($99/month)**: -- 10,000 agent tool calls/hour -- Unlimited repositories -- Priority support -- Multi-agent workflows (unlimited) -- Self-hosted option - -**Enterprise (Custom)**: -- Unlimited tool calls -- Dedicated infrastructure -- SLA guarantees -- Custom integrations -- White-label option -- On-prem deployment - -### Marketing Channels - -**Technical Content**: -- Blog series: "Building Production-Grade Agent Workflows" -- Video tutorials: "From One Agent to Many in 10 Minutes" -- Case studies: "How We Automated 65+ GitHub Issues" - -**Community Building**: -- Discord server for agentic engineers -- Monthly demo days (showcase community agents) -- Open-source agent templates repository - -**Partnerships**: -- Anthropic (Claude Code integration) -- Cursor (IDE integration) -- Replit (cloud deployment) -- GitHub (marketplace listing) - ---- - -## Strategic Vision: "GitHub for Agents" - -### The Analogy - -``` -Traditional Development → Agentic Development -━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ -GitHub (code hosting) → KotaDB (agent coordination) -Git (version control) → Worktree isolation + state -CI/CD (automation) → ADW workflows -Docker Hub (containers) → Agent registry -npm (package manager) → Tool marketplace -``` - -### The Positioning - -> **"KotaDB is the infrastructure layer for autonomous software development."** - -While others are building single-purpose agents or ad-hoc orchestration scripts, KotaDB provides: -- **Standardized communication** via MCP -- **Resource isolation** via git worktrees -- **State management** via persistent snapshots -- **Security** via tier-based authentication -- **Observability** via audit logs and metrics - -### The Tagline - -> **"Stop managing agents. 
Start shipping with them."** - ---- - -## Risk Assessment - -### Technical Risks - -**Risk**: MCP standard is young, could change significantly -**Mitigation**: Maintain adapter layer, contribute to MCP spec development - -**Risk**: LLM providers may build competing orchestration platforms -**Mitigation**: Focus on multi-vendor support, become Switzerland of agent platforms - -**Risk**: Agent performance unpredictable, workflows may fail -**Mitigation**: Retry logic, fallback agents, comprehensive logging - -### Market Risks - -**Risk**: Market not ready for multi-agent coordination -**Mitigation**: Strong early adopter community (you + others like you) - -**Risk**: Enterprise concerns about agent autonomy -**Mitigation**: Emphasize observability, audit logs, human-in-the-loop options - -**Risk**: Competitors emerge with similar offerings -**Mitigation**: First-mover advantage, proven production usage, open-source community - -### Execution Risks - -**Risk**: Complexity scales faster than team can handle -**Mitigation**: Modular architecture, prioritize Phase 1, hire strategically - -**Risk**: Support burden for custom agents -**Mitigation**: Clear documentation, agent certification program, marketplace curation - ---- - -## Success Metrics - -### Phase 1 (Framework Core) -- ✅ 10 external agents registered -- ✅ 100 workflows executed via MCP -- ✅ 5 beta users providing feedback - -### Phase 2 (Developer Experience) -- ✅ 100 agents published -- ✅ 1,000 workflows executed/month -- ✅ 50 active developers in community - -### Phase 3 (Enterprise Platform) -- ✅ 10 enterprise customers -- ✅ 10,000 workflows executed/month -- ✅ $100k MRR -- ✅ SOC2 certification - ---- - -## Immediate Next Steps (Week 1-2) - -### 1. Positioning & Narrative -- [ ] Update README.md to emphasize multi-agent framework -- [ ] Create marketing site (kotadb.dev) with agent-centric messaging -- [ ] Write manifesto (separate document, see below) -- [ ] Record demo video: ADW running full SDLC autonomously - -### 2. Framework MVP (Week 3-4) -- [ ] Create `automation/adws/mcp_server/` directory structure -- [ ] Implement Phase 1 tools (workflow orchestration, git ops) -- [ ] Update `.mcp.json` with ADW server configuration -- [ ] Write "Build Your First Agent" tutorial -- [ ] Publish Python SDK for agent registration - -### 3. Community Building (Month 2) -- [ ] Launch Discord server -- [ ] Open-source agent templates repo -- [ ] Host first monthly demo day -- [ ] Reach out to Anthropic for partnership discussion - ---- - -## Appendix: Investigation Methodology - -This investigation was conducted via the `/workflows:prime` slash command on October 13, 2025. The process included: - -1. **Git state sync**: Fetched latest changes, confirmed working branch -2. **File inventory**: Analyzed 191 tracked files across app/ and automation/ directories -3. **Documentation review**: README.md, CLAUDE.md, automation/adws/README.md, 26 spec files -4. **MCP implementation analysis**: Reviewed app/src/mcp/ (server.ts, tools.ts, routes.ts) -5. **Automation workflow analysis**: Studied adw_modules/, adw_phases/, trigger systems -6. **MCP ecosystem research**: Web search for 2025 MCP trends, server patterns, security issues -7. **Integration opportunity mapping**: Identified gaps between current capabilities and framework vision - -**Key Insight**: The investigation revealed that KotaDB has already built 80% of a multi-agent framework. The gap is primarily positioning, documentation, and exposing existing capabilities via MCP tools. 
- ---- - -## Conclusion - -KotaDB is uniquely positioned to own the multi-agent collaboration space. The code search and indexing capabilities are not the product—they're the **memory layer** that enables agents to understand codebases. The real product is the orchestration infrastructure. - -**The opportunity**: Rebrand from "code search for AI agents" to "infrastructure layer for autonomous software development." Position KotaDB as the platform where agents discover tools, coordinate workflows, and collaborate on complex tasks. - -**The timing**: MCP adoption is accelerating (OpenAI, Anthropic support), but no one has built production-grade multi-agent infrastructure. We have 65+ issues automated in production. We're not just ahead—we're playing a different game. - -**The moat**: Multi-tenant security, worktree isolation, proven SDLC patterns, and battle-tested agent coordination. Competitors will take 12-18 months to catch up. We have a narrow window to define the category. - -**Next steps**: Build the ADW MCP server (Phase 1), write the manifesto, record the demo video. Show the world what autonomous software development looks like. diff --git a/docs/vision/CURRENT_STATE.md b/docs/vision/CURRENT_STATE.md deleted file mode 100644 index 66680ae8..00000000 --- a/docs/vision/CURRENT_STATE.md +++ /dev/null @@ -1,506 +0,0 @@ -# KotaDB Current State & Gap Analysis - -**Last Updated**: 2025-10-20 -**Overall Progress**: ~70% complete -**Status**: Foundation strong, AST parsing mostly complete, job queue remains critical blocker - -## Executive Summary - -KotaDB has successfully implemented **database infrastructure**, **authentication**, **MCP server**, **AST-based code parsing** (70%), and **testing harness**. The codebase is production-ready for what exists, but **two critical gaps** remain for the SaaS platform MVP: - -1. **Job queue for async indexing** (Epic 4) - All indexing blocks API requests -2. **GitHub integration** (Epic 5) - No auto-indexing on push events - -**Major Recent Progress**: AST parsing milestone achieved! Reference extraction (#75) and dependency graph extraction with circular detection (#76) are now complete. The `search_dependencies` MCP tool (#116) is operational, enabling impact analysis queries. - -**Good News**: The foundation is solid. Database schema, auth middleware, MCP server, AST parsing (reference extraction + dependency graphs), and testing infrastructure are battle-tested and working well. - -**Reality Check**: We're 70% done with infrastructure and 50% done with user-facing features. Epic 3 (Code Parsing) went from 30% to 70% complete with recent merges. The remaining 30% is job queue + GitHub integration - high-leverage work that unlocks async operations and auto-indexing. 
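To make the job-queue gap concrete: the target pattern is to enqueue indexing work at request time and process it in a background worker. A minimal pg-boss sketch, assuming the queue name and payload shape (the shipped schema may differ):

```typescript
// Sketch of async indexing with pg-boss. "index-repository" and the
// payload shape are illustrative, not the implemented schema.
import PgBoss from "pg-boss";

const boss = new PgBoss(process.env.SUPABASE_DB_URL!);
await boss.start();

// Producer: the POST /index handler enqueues instead of indexing inline.
await boss.send("index-repository", { repository: "org/repo" });

// Consumer: a background worker performs the actual clone + extraction.
await boss.work("index-repository", async (jobs) => {
  // pg-boss v10 delivers a batch; older versions deliver a single job.
  for (const job of [jobs].flat()) {
    console.log("indexing", job.data);
  }
});
```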
- ---- - -## What's Working (What We've Built) - -### ✅ Database Foundation (Epic 1: 95% complete) -**Reality**: Supabase schema is robust, migrations work, RLS is enforced - -**Evidence**: -- 10 tables: `users`, `api_keys`, `organizations`, `repositories`, `index_jobs`, `indexed_files`, `symbols`, `references`, `dependencies`, `rate_limit_counters` -- Row Level Security (RLS) policies isolate multi-tenant data -- Migration system works (`app/src/db/migrations/` synced to `app/supabase/migrations/`) -- `increment_rate_limit()` database function for atomic counter updates -- Integration tests use real Supabase Local (antimocking compliance) - -**Files**: -- `app/src/db/client.ts` - Supabase client initialization -- `app/src/db/migrations/` - Up/down migrations (6 files) -- `app/supabase/migrations/` - Copy for Supabase CLI - -**Remaining Work**: -- Index optimization for hot query paths (minor) -- Migration sync validation in CI (minor) - ---- - -### ✅ Authentication & Rate Limiting (Epic 2: 90% complete) -**Reality**: API keys work, tier-based rate limiting enforced, multi-tenancy ready - -**Evidence**: -- API key generation with bcrypt hashing (`kota__` format) -- Authentication middleware validates keys and extracts user context -- Rate limiting: free (100/hr), solo (1000/hr), team (10000/hr) -- Rate limit headers: `X-RateLimit-Limit`, `X-RateLimit-Remaining`, `X-RateLimit-Reset`, `Retry-After` -- In-memory caching for API key lookups (reduces database load) -- Test coverage: 317 tests passing, including auth/rate limit integration tests - -**Files**: -- `app/src/auth/middleware.ts` - Authentication + rate limit enforcement -- `app/src/auth/validator.ts` - API key validation -- `app/src/auth/keys.ts` - Key generation -- `app/src/auth/rate-limit.ts` - Tier-based rate limiting logic -- `app/src/auth/cache.ts` - In-memory caching - -**Remaining Work**: -- Organization management endpoints (team tier multi-tenancy) - medium priority -- Key rotation/revocation workflows - future enhancement - ---- - -### ✅ MCP Server (Epic 7: 98% complete) -**Reality**: HTTP JSON-RPC implementation complete, 4 core tools working, production-ready - -**Evidence**: -- Using official `@modelcontextprotocol/sdk` (v1.20+) -- HTTP transport via Express + `StreamableHTTPServerTransport` (not SSE - pragmatic decision) -- **Four tools** (NEW as of PR #229): - - `search_code` - Full-text search across indexed files - - `index_repository` - Trigger repository indexing - - `list_recent_files` - Query recently indexed files - - **`search_dependencies`** (#116) - Dependency graph search for impact analysis - - Supports three directions: dependents (reverse lookup), dependencies (forward), both - - Recursive traversal with configurable depth (1-5) - - Detects circular dependencies during graph traversal - - Optional test file filtering via `include_tests` parameter -- Per-request server isolation (stateless design) -- Rate limit headers set before SDK transport handles request -- 122/132 MCP tests passing (92.4% coverage) -- Integration guide: `docs/guides/mcp-claude-code-integration.md` - -**Technical Decision Note**: -Vision document proposed SSE streaming, but implementation uses HTTP JSON-RPC for simplicity and better error handling. This matches real-world MCP usage patterns and reduces operational complexity. 
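The per-request isolation described in the note maps onto a small amount of code. A sketch under the assumption that the SDK's option names match v1.x; `createMcpServer` here is a stand-in for the real server factory:

```typescript
// Stateless per-request MCP handling: a fresh server + transport pair is
// created for every POST /mcp, so no state leaks between callers.
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
import express from "express";

function createMcpServer(): McpServer {
  return new McpServer({ name: "kotadb", version: "0.1.0" });
}

const app = express();
app.use(express.json());

app.post("/mcp", async (req, res) => {
  const server = createMcpServer();
  const transport = new StreamableHTTPServerTransport({
    sessionIdGenerator: undefined, // stateless: no session tracking
  });
  await server.connect(transport);
  await transport.handleRequest(req, res, req.body);
});
```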
- -**Files**: -- `app/src/mcp/server.ts` - MCP server factory -- `app/src/mcp/tools.ts` - Tool execution logic (includes dependency graph search) -- `app/tests/mcp/` - Comprehensive test suite (9 files, 100+ test cases) - -**Remaining Work**: -- `find_references` tool (needs symbol resolution, ~1 week) -- Advanced tools: `analyze_impact`, `get_type_hierarchy` (future) - ---- - -### ✅ REST API (Epic 6: 70% complete) -**Reality**: Core endpoints working, repository management incomplete - -**Evidence**: -- `/health` - Database health check (returns 200 OK if DB connected) -- `/index` (POST) - Triggers repository indexing (currently synchronous, blocks until complete) -- `/search` (GET) - Full-text search across indexed files -- `/files/recent` (GET) - Recently indexed files -- `/validate-output` (POST) - Schema validation for slash commands -- Authentication middleware on all protected endpoints -- Rate limiting enforced on all authenticated endpoints - -**Files**: -- `app/src/api/routes.ts` - Express app factory with middleware and route handlers -- `app/src/api/queries.ts` - Database query functions - -**Remaining Work**: -- Repository management endpoints (list, add, remove, configure repos) -- Job status polling endpoints (for async indexing, blocked by Epic 4) -- Organization management (team tier multi-tenancy) -- Pagination for large result sets -- Sync OpenAPI spec (`docs/openapi.yaml`) with implementation - ---- - -### ✅ Testing Infrastructure (Epic 10: 88% complete) -**Reality**: Strong test coverage, antimocking philosophy enforced, CI pipelines working, standardized environment setup - -**Evidence**: -- 317 tests passing across application (TypeScript) and automation (Python) layers -- Integration tests use real Supabase Local (no mocks) -- MCP regression suite (122 tests, 9 files) -- **Standardized test environment setup** (#220) - slash commands consistently use test DB -- **Centralized rate limit reset helpers** (#201) - improved test isolation -- GitHub Actions CI: - - Application CI: lint, typecheck, full test suite against Supabase Docker - - Automation CI: Python syntax checks, pytest suite -- Test helpers: `app/tests/helpers/` (db, auth, MCP utilities, rate limit reset) -- Test fixtures: `app/tests/fixtures/mcp/sample-repository/` - -**Files**: -- `app/tests/` - Test suite (27 test files) -- `.github/workflows/app-ci.yml` - Application CI pipeline -- `.github/workflows/automation-ci.yml` - Automation CI pipeline -- `app/scripts/setup-test-db.sh` - Supabase Local lifecycle management -- `app/tests/helpers/rate-limit.ts` - Centralized rate limit reset utilities - -**Remaining Work**: -- E2E tests (full indexing pipeline end-to-end) -- Performance regression tests (query latency benchmarks) -- Contract tests for OpenAPI spec validation - ---- - -### ✅ CI/CD Infrastructure (Epic 9: 45% complete) -**Reality**: CI pipelines robust, pre-commit hooks automated, Fly.io deployment not implemented - -**Evidence**: -- GitHub Actions workflows running on every PR -- Docker Compose for local development -- **Husky pre-commit hooks** (#198) - automated typecheck + lint on staged files - - Automatically installed via `bun install` (prepare script) - - Runs `bunx tsc --noEmit` and `bun run lint` in `app/` directory - - Skips checks if no relevant files changed (fast commits) - - Bypass via `git commit --no-verify` (emergency only) -- Migration sync validation -- Supabase Local integration in CI (isolated project names prevent port conflicts) - -**Files**: -- `.github/workflows/app-ci.yml` 
-- `.github/workflows/automation-ci.yml` -- `app/scripts/dev-start.sh` - Local development automation -- `app/.husky/` - Pre-commit hooks (typecheck, lint) -- `app/.husky/pre-commit` - Hook execution script - -**Remaining Work**: -- Fly.io app creation (`kotadb-staging`, `kotadb-prod`) -- Deployment automation (develop → staging, main → production) -- Environment-specific `fly.toml` configurations -- Secrets management scripts (`scripts/sync-secrets-*.sh`) -- Automated database migrations on deploy -- Rollback procedures - ---- - -### 🟡 Monitoring & Operations (Epic 8: 15% complete) -**Reality**: Basic health checks exist, comprehensive observability missing - -**Evidence**: -- `/health` endpoint with database connection check -- Basic error logging in API handlers - -**Files**: -- `app/src/api/routes.ts` (health endpoint) - -**Remaining Work**: -- JSON-formatted structured logging with correlation IDs (`request_id`, `user_id`, `job_id`) -- Request/response logging middleware -- Slow query detection -- Fly.io metrics dashboard setup (pending deployment) -- Alert configuration (downtime, error rate thresholds) -- Sentry integration for error tracking (optional, future) - ---- - -## What's Blocking MVP (Critical Gaps) - -### 🟡 Epic 3: Enhanced Code Parsing (70% complete) **[NO LONGER BLOCKING MVP]** -**Status**: Major progress! Reference extraction and dependency graph complete. - -**Completed Work** (as of PRs #225, #226): -- ✅ AST parsing with `@typescript-eslint/parser` (#117, merged earlier) -- ✅ Symbol extraction (#74, merged earlier) - functions, classes, exports with positions -- ✅ **Reference tracking** (#75) - imports, calls, property accesses with call sites -- ✅ **Dependency graph extraction** (#76) - file→file edges with circular detection -- ✅ Dependency search via `search_dependencies` MCP tool (#116) -- File discovery works (walks project tree, filters by extension) -- Support for `.ts`, `.tsx`, `.js`, `.jsx`, `.json` files -- Gitignore compliance (skips `.git`, `node_modules`, `dist`) - -**Remaining Work** (30%): -- Type relationship extraction (interfaces, type aliases, generics) - nice-to-have -- Docstring/comment extraction (JSDoc, TSDoc) - nice-to-have -- Symbol resolution for `find_references` tool - ~1 week -- Advanced dependency analysis (transitive closure optimization) - future - -**Why This NO LONGER Blocks MVP**: -Dependency graph extraction is complete and operational. The `search_dependencies` tool enables impact analysis ("what depends on this file"). Symbol extraction and reference tracking provide structured code intelligence. The remaining 30% is polish (type relationships, docstrings) not required for core value prop. - -**Impact on Product**: -- ✅ `search_dependencies` tool **fully operational** (dependency graph ready) -- 🟡 `find_references` tool requires symbol resolution (~1 week, not MVP-blocking) -- ✅ Value proposition achieved: "structured code intelligence with dependency analysis" - -**Estimated Effort for Remaining Work**: 1 week - -**Next Steps**: -1. Symbol resolution for `find_references` tool (map references to symbol IDs) -2. Type relationship extraction (optional, enhances query capabilities) -3. 
Docstring extraction for better context in search results - ---- - -### 🔴 Epic 4: Job Queue & Background Processing (0% complete) **[BLOCKER]** -**Gap**: All indexing runs synchronously, blocking API requests - -**Current State**: -- `/index` endpoint blocks until indexing completes (30s+ for large repos) -- No async workers for indexing -- No retry logic for failed indexing -- No job status tracking for frontend - -**Critical Missing Features**: -- pg-boss queue setup (Postgres-backed, no external service needed) -- Worker processes for async indexing -- Retry logic with exponential backoff -- Dead letter queue for failed jobs -- Job status updates (pending → in_progress → completed/failed) -- Frontend polling endpoints for job status - -**Why This Blocks MVP**: -Users trigger indexing via API and get timeouts on large repos. No webhook-triggered auto-indexing possible. Frontend can't show indexing progress. Single-threaded indexing can't scale. - -**Impact on Product**: -- Poor UX (users wait 30s+ for API responses) -- No webhook integration possible (Epic 5 depends on this) -- Can't index multiple repos concurrently -- No resilience (if indexing fails, no retry) - -**Estimated Effort**: 1-2 weeks - -**Next Steps**: -1. Install `pg-boss` package -2. Create `app/src/queue/client.ts` (queue initialization) -3. Create `app/src/queue/workers.ts` (job handlers for indexing) -4. Update `/index` endpoint to enqueue job instead of blocking -5. Implement worker pools with concurrency limits -6. Add job status endpoints for frontend polling -7. Integration tests with real Supabase queue - -**Files to Create**: -- `app/src/queue/client.ts` - pg-boss client initialization -- `app/src/queue/workers.ts` - Job handlers (indexing, webhook processing) -- `app/src/api/jobs.ts` - Job status endpoints -- `app/tests/queue/` - Queue integration tests - ---- - -### 🔴 Epic 5: GitHub Integration (0% complete) **[BLOCKER]** -**Gap**: No GitHub App, no webhooks, no auto-indexing on push events - -**Current State**: -- Users must manually trigger indexing via API -- No automatic re-indexing on code changes -- No access to private repositories (using public git clone currently) - -**Critical Missing Features**: -- GitHub App registration (manual setup, one-time) -- Installation token generation for private repo access -- Webhook receiver (`POST /webhooks/github`) -- Webhook signature verification (HMAC-SHA256) -- Auto-indexing on push events (queues job via Epic 4) -- Frontend integration for app installation flow - -**Why This Blocks MVP**: -Core value proposition is "always up-to-date code intelligence." Without webhooks, indexes go stale immediately after first push. Users must manually re-index after every change. Private repos are inaccessible. - -**Impact on Product**: -- Stale indexes (context becomes outdated quickly) -- No private repo support (blocks majority of real-world use cases) -- Manual reindexing workflow (terrible UX) -- No competitive advantage over local file search - -**Estimated Effort**: 2 weeks - -**Next Steps**: -1. Register GitHub App (permissions: contents:read, webhooks:write) -2. Store `GITHUB_APP_ID` and `GITHUB_APP_PRIVATE_KEY` as secrets -3. Implement `app/src/github/app.ts` (installation token generation) -4. Implement `app/src/api/webhooks.ts` (webhook receiver) -5. Queue indexing jobs on push events (integrates with Epic 4) -6. Frontend: installation flow UI (out of scope for this repo, coordination needed) -7. 
Integration tests with mock GitHub webhook payloads - -**Files to Create**: -- `app/src/github/app.ts` - GitHub App client (installation tokens) -- `app/src/api/webhooks.ts` - Webhook receiver and signature verification -- `app/tests/github/` - GitHub integration tests -- `app/tests/fixtures/github-webhooks/` - Mock webhook payloads - ---- - -## Vision vs. Reality Comparison - -| Component | Vision (VISION.md) | Reality (Current State) | Status | -|-----------|-------------------|-------------------------|--------| -| **Database** | PostgreSQL via Supabase, RLS for multi-tenancy | ✅ Implemented exactly as planned | Complete | -| **Auth** | API keys with tier-based rate limiting | ✅ Implemented exactly as planned | Complete | -| **MCP Transport** | SSE streaming | ⚠️ HTTP JSON-RPC (pragmatic decision) | Complete (different approach) | -| **MCP Tools** | 3 MVP tools: search_code, find_references, get_dependencies | ✅ 4 tools (search_code, index_repository, list_recent_files, **search_dependencies**) | Near complete (98%) | -| **Code Parsing** | AST-based with @typescript-eslint/parser | ✅ Reference extraction + dependency graph complete (70%) | Major progress | -| **Job Queue** | pg-boss for async indexing | 🔴 Not implemented (all indexing synchronous) | Critical gap | -| **GitHub Integration** | GitHub App with webhooks | 🔴 Not implemented (manual indexing only) | Critical gap | -| **REST API** | Full repository management + job status | 🟡 Core endpoints only, no repo management | Partial | -| **Monitoring** | Structured logging + Fly.io metrics | 🟡 Basic health checks only | Partial | -| **Deployment** | Fly.io with automated CI/CD | 🟡 CI pipelines only, no Fly.io deployment | Partial | -| **Testing** | 70% coverage with integration tests | ✅ 88% coverage, antimocking enforced | Exceeds expectations | - -**Key Insights**: -- **Foundation is stronger than expected**: Database, auth, MCP server, AST parsing, testing exceed vision goals -- **Major breakthrough**: Epic 3 (Code Parsing) advanced from 30% to 70% with reference extraction (#75) and dependency graph (#76) -- **Only 2 critical gaps remain**: Job queue (Epic 4) and GitHub integration (Epic 5) - down from 3 blockers -- **Pragmatic technical decisions**: HTTP JSON-RPC instead of SSE (simpler, more robust) -- **Strong engineering culture**: Antimocking philosophy, real integration tests, CI discipline - ---- - -## Actionable Next Steps for Contributors - -### Immediate Priorities (Sprint 1-2, Weeks 1-4) - -#### 1. Complete AST Parsing Follow-Ups (Epic 3) -**Owner**: Needs assignment -**Effort**: ~1 week -**Dependencies**: None (symbol extraction, reference tracking, and dependency graph already merged - see Epic 3 status above) - -**Tasks**: -- [x] Add `@typescript-eslint/parser` and `@typescript-eslint/types` dependencies (#117) -- [x] Store symbols, references, and dependencies in their respective tables (#74, #75, #76) -- [x] Wire the `search_dependencies` MCP tool to the dependency graph (#116) -- [ ] Symbol resolution for the `find_references` tool (map references to symbol IDs) -- [ ] Type relationship extraction (interfaces, type aliases, generics) - optional -- [ ] Docstring/comment extraction (JSDoc, TSDoc) - optional -- [ ] Integration tests covering the new extraction paths - -**Success Criteria**: -- `find_references` returns accurate reference lists for resolved symbols -- Dependency graph query: "what imports this function" returns accurate results -- `search_dependencies` MCP tool returns transitive dependencies - ---- - -#### 2. 
Implement Job Queue with pg-boss (Epic 4) -**Owner**: Needs assignment -**Effort**: 1-2 weeks -**Dependencies**: None (database ready) - -**Tasks**: -- [ ] Install `pg-boss` package -- [ ] Create `app/src/queue/client.ts` (queue initialization using Supabase connection) -- [ ] Create `app/src/queue/workers.ts` (indexing job handler) -- [ ] Update `/index` endpoint to enqueue job instead of blocking -- [ ] Implement retry logic with exponential backoff (3 retries max) -- [ ] Add job status endpoints: `GET /jobs/:id` (poll status), `GET /jobs` (list jobs) -- [ ] Integration tests with real pg-boss queue -- [ ] Update frontend integration guide (job status polling) - -**Success Criteria**: -- `/index` endpoint returns immediately with job ID -- Indexing runs in background worker -- Failed jobs retry automatically (max 3 attempts) -- Frontend can poll job status and show progress - ---- - -#### 3. Implement GitHub Integration (Epic 5) -**Owner**: Needs assignment -**Effort**: 2 weeks -**Dependencies**: Epic 4 (job queue) - -**Tasks**: -- [ ] Register GitHub App (permissions: contents:read, webhooks:write) -- [ ] Store `GITHUB_APP_ID` and `GITHUB_APP_PRIVATE_KEY` as environment secrets -- [ ] Implement `app/src/github/app.ts` (generate installation tokens using Octokit) -- [ ] Implement `app/src/api/webhooks.ts` (webhook receiver, signature verification) -- [ ] Queue indexing job on `push` events (use Epic 4 queue) -- [ ] Store `installation_id` in `repositories` table -- [ ] Integration tests with mock GitHub webhook payloads -- [ ] Documentation: GitHub App setup guide for self-hosted deployments - -**Success Criteria**: -- GitHub App installation flow works (manual test with real GitHub account) -- Push to tracked repo triggers automatic re-indexing -- Webhook signature verification prevents unauthorized requests -- Private repos accessible via installation tokens - ---- - -### Follow-Up Work (Sprint 3+, Weeks 5-10) - -#### 4. Complete REST API (Epic 6) -**Tasks**: -- Repository management endpoints (list, add, remove, configure) -- Organization management (team tier multi-tenancy) -- Pagination for large result sets -- Sync OpenAPI spec with implementation - -#### 5. Fly.io Deployment (Epic 9) -**Tasks**: -- Create `kotadb-staging` and `kotadb-prod` apps -- Environment-specific `fly.toml` configurations -- Deployment automation (GitHub Actions → Fly.io) -- Secrets management scripts -- Automated database migrations on deploy - -#### 6. Observability Improvements (Epic 8) -**Tasks**: -- JSON-formatted structured logging with correlation IDs -- Request/response logging middleware -- Fly.io metrics dashboard setup -- Alert configuration (downtime, error rate thresholds) - ---- - -## Resources for New Contributors - -### Documentation -- **Architecture Overview**: `CLAUDE.md` (comprehensive project guide) -- **Testing Setup**: `docs/testing-setup.md` (antimocking philosophy, Supabase Local) -- **MCP Integration**: `docs/guides/mcp-claude-code-integration.md` -- **Automation Workflows**: `automation/adws/README.md` (ADW system) - -### Key Files -- **Database**: `app/src/db/client.ts`, `app/src/db/migrations/` -- **Authentication**: `app/src/auth/middleware.ts`, `app/src/auth/keys.ts` -- **MCP Server**: `app/src/mcp/server.ts`, `app/src/mcp/tools.ts` -- **Indexing**: `app/src/indexer/parsers.ts`, `app/src/indexer/extractors.ts` -- **API**: `app/src/api/routes.ts`, `app/src/api/queries.ts` - -### Development Workflow -1. 
**Local Setup**: Run `cd app && ./scripts/dev-start.sh` (starts Supabase + API server) -2. **Tests**: Run `cd app && bun test` (full suite against real Supabase Local) -3. **Validation**: Run `cd app && bun run lint && bunx tsc --noEmit` -4. **CI**: All PRs must pass `app-ci.yml` (lint, typecheck, test suite) - -### Communication -- **GitHub Issues**: All work tracked via issues (see epic files for issue templates) -- **Pull Requests**: Follow conventional commit format, reference issue numbers -- **ADW Automation**: AI agents can pick up any issue where dependencies are satisfied - ---- - -## Frequently Asked Questions - -### Q: Why is the foundation so strong but features incomplete? -**A**: Engineering discipline prioritizes solid infrastructure over rushing features. Database, auth, and testing are production-ready because they're hard to change later. Features (AST parsing, job queue) are easier to add once foundation is stable. - -### Q: Why use HTTP JSON-RPC instead of SSE for MCP? -**A**: Vision document proposed SSE, but implementation revealed HTTP is simpler, more debuggable, and matches real-world MCP usage patterns. SSE adds complexity (connection management, heartbeats, reconnection logic) without clear benefits for our use case. - -### Q: What's the biggest risk to MVP timeline? -**A**: AST parsing complexity (Epic 3). TypeScript AST is notoriously complex (unions, generics, decorators). If this takes longer than 3 weeks, entire timeline slips. Mitigation: Start with basic symbol extraction (functions, classes), defer advanced features (generics, type inference). - -### Q: Can I start Epic 4 (job queue) before Epic 3 (AST parsing) is done? -**A**: Yes! Epic 4 has no dependency on Epic 3. You can implement the job queue infrastructure using the current regex-based indexing, then swap in AST parsing later. This parallelizes work and de-risks the timeline. - -### Q: What happens if we skip Epic 5 (GitHub integration) for MVP? -**A**: Product is usable but not competitive. Users can manually trigger indexing via API, but indexes go stale quickly. Private repos are inaccessible. This limits MVP to hobbyists and demo scenarios. Not recommended. - ---- - -**Last Updated**: 2025-10-20 -**Maintained By**: KotaDB core team -**Update Frequency**: Weekly during active development diff --git a/docs/vision/README.md b/docs/vision/README.md deleted file mode 100644 index e28d4285..00000000 --- a/docs/vision/README.md +++ /dev/null @@ -1,107 +0,0 @@ -# KotaDB Vision & Implementation Plan - -This directory contains the comprehensive vision and implementation roadmap for KotaDB. - -## Navigation Guidance - -**New contributors**: Start with [ROADMAP.md](./ROADMAP.md) (practical roadmap with current priorities) → then [CURRENT_STATE.md](./CURRENT_STATE.md) (gap analysis with actionable next steps) → then [VISION.md](./VISION.md) (aspirational goals and technical decisions). - -**For quick status**: See [ROADMAP.md Quick Status Overview](./ROADMAP.md#quick-status-overview) table. - -**For prioritization**: See [CURRENT_STATE.md Actionable Next Steps](./CURRENT_STATE.md#actionable-next-steps-for-contributors). - -**For strategic context**: See [2025-10-13-multi-agent-framework-investigation.md](./2025-10-13-multi-agent-framework-investigation.md) for Phase 2/3 vision (multi-agent framework). 
- -## Core Documents - -- **[ROADMAP.md](./ROADMAP.md)** - Practical roadmap with epic completion status, MVP blockers, and realistic timeline (**Start here**) -- **[CURRENT_STATE.md](./CURRENT_STATE.md)** - Gap analysis with "What's Working" vs. "What's Blocking MVP" sections -- **[VISION.md](./VISION.md)** - Complete product vision, technical decisions, and architecture (aspirational goals) - -## Strategic Phasing - -KotaDB follows a phased approach: - -### Phase 1: SaaS Platform (Current Focus, Weeks 1-10) -**Goal**: Ship kotadb.io as a hosted service for MCP-compatible CLI agents - -**Scope**: -- Public SaaS platform at kotadb.io -- Tier-based authentication (free/solo/team) -- Webhook-triggered auto-indexing -- MCP server with 3 core tools: `search_code`, `index_repository`, `list_recent_files` -- REST API for frontend dashboard - -**Progress**: ~60% complete. **Three critical gaps block MVP**: -1. **Epic 3**: AST-based code parsing (30% complete) - **MVP BLOCKER** -2. **Epic 4**: Job queue for async indexing (0% complete) - **MVP BLOCKER** -3. **Epic 5**: GitHub integration with webhooks (0% complete) - **MVP BLOCKER** - -See [ROADMAP.md](./ROADMAP.md) for detailed status and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis. - -### Phase 2/3: Multi-Agent Framework (Future, Post-MVP) -**Goal**: Reposition KotaDB as infrastructure for autonomous software development - -**Scope** (deferred to post-MVP): -- ADW framework productization (currently internal tooling) -- Agent registry and marketplace -- Cross-vendor agent collaboration (Anthropic + OpenAI + custom agents) -- Workflow orchestration primitives -- Self-hosted deployment option - -**Why Deferred**: Multi-agent framework requires proven SaaS platform as foundation. Phase 1 validates product-market fit and builds revenue to fund Phase 2/3 development. - -**Reference**: See [2025-10-13-multi-agent-framework-investigation.md](./2025-10-13-multi-agent-framework-investigation.md) for strategic vision and [archive/manifesto.md](./archive/manifesto.md) for Phase 2/3 marketing material. - -## Implementation Epics - -The implementation is broken into 10 epics with clear dependencies. **Note**: Epic files are reference documents from original planning. See ROADMAP.md for current completion status and priorities. - -### Foundation & Infrastructure -1. **[Epic 1: Database Foundation & Schema](./epic-1-database-foundation.md)** - Supabase schema, migrations, RLS -2. **[Epic 2: Authentication Infrastructure](./epic-2-authentication.md)** - API keys, auth middleware, rate limiting -3. **[Epic 9: CI/CD & Deployment](./epic-9-cicd-deployment.md)** - Fly.io setup, CI pipeline, secrets management - -### Core Services -4. **[Epic 3: Enhanced Code Parsing](./epic-3-code-parsing.md)** - AST parsing, symbol extraction, dependency graphs -5. **[Epic 4: Job Queue & Background Processing](./epic-4-job-queue.md)** - pg-boss queue, indexing worker -6. **[Epic 5: GitHub Integration](./epic-5-github-integration.md)** - GitHub App, webhooks, auto-indexing - -### API Implementation -7. **[Epic 6: REST API Migration](./epic-6-rest-api.md)** - OpenAPI spec, repository management -8. **[Epic 7: MCP Server Implementation](./epic-7-mcp-server.md)** - SSE transport, MCP protocol, 3 MVP tools - -### Operations & Quality -9. **[Epic 8: Monitoring & Operations](./epic-8-monitoring.md)** - Logging, health checks, metrics -10. 
**[Epic 10: Comprehensive Testing](./epic-10-testing.md)** - Unit, integration, E2E tests - -## Dependency Overview - -``` -Epic 1 (Database) ──► Epic 2 (Auth) - │ │ - └────────► Epic 3 (Parsing) ──► Epic 4 (Queue) ──► Epic 5 (GitHub) - │ │ - └────────► Epic 6 (REST API) ──► Epic 7 (MCP) - -Epics 1-7 ──► Epic 8 (Monitoring) -Epics 1-7 ──► Epic 10 (Testing) ──► Epic 9 (CI/CD) -``` - -## Implementation Timeline - -- **Phase 1A (Weeks 1-2)**: Epics 1 & 2 (database + auth foundations); establish initial testing harness (Epic 10). -- **Phase 1B (Weeks 3-4)**: Epics 3 & 4 (parsing pipeline and queue) with continuous test coverage expansion. -- **Phase 1C (Weeks 5-6)**: Epics 5 & 6 (GitHub integration and REST API). -- **Phase 1D (Weeks 7-8)**: Epics 7 & 8 (MCP server and monitoring/operations). -- **Phase 1E (Week 9)**: Epics 9 & 10 (CI/CD automation and test hardening for launch). - -## Working with ADW - -Each epic file contains discrete GitHub issues with: -- Clear acceptance criteria -- Dependency tracking -- Technical specifications -- Test requirements - -ADW agents can pick up any issue where dependencies are satisfied. diff --git a/docs/vision/ROADMAP.md b/docs/vision/ROADMAP.md deleted file mode 100644 index cca70836..00000000 --- a/docs/vision/ROADMAP.md +++ /dev/null @@ -1,31 +0,0 @@ -# KotaDB Roadmap - -**Last Updated**: 2025-10-20 -**Current Phase**: Phase 1 (SaaS Platform MVP) -**MVP Target**: 10-week timeline (5 two-week sprints) - -## Quick Status Overview - -| Epic | Completion | Status | MVP Blocker | -|------|-----------|--------|-------------| -| **Epic 1**: Database Foundation | 95% | 🟢 Complete | No | -| **Epic 2**: Authentication | 90% | 🟢 Complete | No | -| **Epic 3**: Code Parsing | 70% | 🟢 Near Complete | No | -| **Epic 4**: Job Queue | 0% | 🔴 Critical Gap | **Yes** | -| **Epic 5**: GitHub Integration | 0% | 🔴 Critical Gap | **Yes** | -| **Epic 6**: REST API | 70% | 🟡 Partial | No | -| **Epic 7**: MCP Server | 98% | 🟢 Complete | No | -| **Epic 8**: Monitoring | 15% | 🟡 Partial | No | -| **Epic 9**: CI/CD & Deployment | 45% | 🟡 Partial | No | -| **Epic 10**: Testing | 88% | 🟢 Complete | No | - -**Overall Progress**: ~70% complete - -**Recent Updates** (2025-10-20): -- Epic 3 advanced from 30% → 70% with reference extraction (#75) and dependency graph (#76) -- Epic 7 advanced from 95% → 98% with `search_dependencies` MCP tool (#116) -- Epic 9 advanced from 40% → 45% with Husky pre-commit hooks (#198) -- Epic 10 advanced from 85% → 88% with test environment standardization (#220, #201) -- **MVP blockers reduced from 3 to 2**: Epic 3 no longer blocking MVP - -For detailed analysis, see [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis and [VISION.md](./VISION.md) for aspirational goals. diff --git a/docs/vision/VISION.md b/docs/vision/VISION.md deleted file mode 100644 index d4330371..00000000 --- a/docs/vision/VISION.md +++ /dev/null @@ -1,599 +0,0 @@ -# KotaDB Vision - -**Last Updated**: 2025-10-20 -**Status**: ~60% complete - Foundation strong, critical gaps block MVP (see ROADMAP.md and CURRENT_STATE.md) - -## Core Concept - -KotaDB is an **intelligence layer** between Claude Code (and other MCP-compatible CLI agents) and codebases, providing high-quality, information-rich context that would be difficult or impossible to obtain through traditional search tools (grep/ripgrep/awk). 
- -## Architecture - -### System Components - -- **kotadb (this repository)**: Backend indexing and query service - - Bun + TypeScript runtime - - PostgreSQL via Supabase (primary data store for all environments) - - ~~SQLite for local development/testing only~~ (Removed: using Supabase Local for all testing) - - MCP server implementation (**HTTP JSON-RPC transport** - see Technical Decisions below) - - REST API for frontend UX - - Webhook receivers for GitHub events (**Planned** - Epic 5, not yet implemented) - - Job queue (pg-boss) for async indexing (**Planned** - Epic 4, not yet implemented) - -**Current Status**: Database, auth, MCP server, and testing infrastructure are production-ready. AST parsing, job queue, and GitHub integration are critical gaps blocking MVP (see ROADMAP.md). - -- **kotadb.io** (separate repository): SaaS frontend application - - Hosted on Cloudflare - - Shares Supabase database with backend - - Handles user authentication, repository selection, API key management - -- **app.kotadb.io/mcp/**: MCP endpoint for CLI agent integration - - Production: `app.kotadb.io/mcp/` - - Staging: `app.develop.kotadb.io/mcp/` - -### Data Flow - -``` -GitHub Repos → KotaDB Backend (indexing) → Supabase DB → MCP API → Claude Code → Developer - ↑ ↓ - └──────────── Status Updates ──────────┘ - (for frontend UX) -``` - -## User Journey - -1. **Authenticate**: User logs into kotadb.io with GitHub OAuth (via Supabase Auth) -2. **Select Repositories**: Dashboard displays available repos; user selects which to index -3. **Indexing**: KotaDB backend processes selected repositories -4. **Configuration**: User receives `.mcp.json` containing: - - Endpoint: `app.kotadb.io/mcp/` (or staging equivalent) - - Personal API key -5. **Integration**: User copies config into Claude Code settings -6. **Usage**: Claude Code queries KotaDB for intelligent context during development - -## Value Proposition - -### Problem - -CLI agents need rich contextual understanding of codebases, but traditional search tools return verbose, low-signal results that are difficult to synthesize. 
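Step 4 of the user journey above says the user receives a generated `.mcp.json`. As a hedged sketch only (the `mcpServers`/`"type": "http"` shape follows Claude Code's MCP client config conventions, and the `Authorization` header name is an assumption; this document does not specify the exact schema), such a file could look like:

```json
{
  "mcpServers": {
    "kotadb": {
      "type": "http",
      "url": "https://app.kotadb.io/mcp/",
      "headers": {
        "Authorization": "Bearer kota_prod_a1b2c3d4e5f6"
      }
    }
  }
}
```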
- -### Solution - -KotaDB provides **semantic code intelligence**: -- Dependency analysis ("X function is used by A, B, C") -- Impact assessment ("Changing X will break these components") -- Relationship mapping ("This module depends on these interfaces") -- Symbol resolution and cross-reference tracking -- Clean, condensed, information-rich responses optimized for LLM consumption - -### Example Use Case - -**Developer**: "Update X function to do Y instead of Z" - -**Claude Code** (queries KotaDB) → receives: -- "Function X is called by A, B, and C" -- "Modifying return type would break B's type assertions" -- "Module C expects current behavior for edge case handling" - -## Technical Decisions - -### Database Architecture - -**Decision: Full PostgreSQL Migration** - -- **Primary Store**: PostgreSQL via Supabase for all production/staging environments -- **Local Development & Testing**: Supabase Local (SQLite has been removed; see System Components above) -- **Migration Strategy**: Fresh Supabase schema (clean slate from previous version) - -**Supabase Schema Design:** - -```sql --- Core tables -users -- Managed by Supabase Auth -api_keys -- Custom keys with tier field (free, solo, team) -organizations -- For team tier multi-tenancy -user_organizations -- Join table for team memberships - --- Repository management -repositories -- Tracked repos per user, includes installation_id -index_jobs -- Webhook-triggered jobs with status tracking - --- Code intelligence -indexed_files -- File metadata, content, hash, indexed_at -symbols -- Functions, classes, types, exports with positions -references -- Where symbols are imported/called (file, line, column) -dependencies -- File→file and symbol→symbol edges -``` - -**Multi-tenancy & Security:** -- Every table includes `user_id` or `org_id` foreign keys -- Row Level Security (RLS) policies enforce data isolation -- API key validation extracts `user_id`, Supabase RLS handles access control automatically - -### MCP Implementation - -**Protocol Version**: MCP 2025-06-18 specification - -**Transport**: HTTP JSON-RPC (Streamable HTTP) -- **Implementation Decision**: HTTP JSON-RPC via `@modelcontextprotocol/sdk` (v1.20+) instead of SSE streaming -- **Rationale**: Simpler error handling, better debugging, matches real-world MCP usage patterns -- **Trade-off**: No real-time streaming for long-running queries, but eliminates connection management complexity -- Endpoint: `/mcp` (POST only, JSON-RPC over HTTP) - -**Current Status**: MCP server is production-ready with 3 tools (95% complete, 122/132 tests passing) - -**MVP Tools (Phase 1)**: Three high-ROI tools for initial release - -1. **`search_code`** (Foundation) ✅ **Implemented** - - Full-text search across indexed files - - Filters: repository, file path, language - - Quick win, validates MCP integration end-to-end - -2. **`index_repository`** (Core Workflow) ✅ **Implemented** - - Triggers repository indexing (currently synchronous, Epic 4 will make async) - - Returns job ID for status polling (once Epic 4 completes) - -3. 
**`list_recent_files`** (Context Discovery) ✅ **Implemented** - - Returns recently indexed files for a repository - - Useful for understanding what's available to search - -**Planned Tools** (blocked by Epic 3 AST parsing): -- **`search_dependencies`** - Dependency graph traversal (requires AST parsing) -- **`find_references`** - Symbol reference lookup (requires AST parsing) - -**Future Tools**: `analyze_impact`, `find_similar`, `get_type_hierarchy` - -### API Architecture - -**Decision: Maintain Both REST and MCP APIs** - -**REST API** (`/api/*`): For frontend UX -- Status polling for indexing jobs -- Dashboard metrics (repos indexed, query usage) -- Repository management (add, remove, configure) -- Pagination, sorting, aggregations optimized for UI - -**MCP API** (`/mcp/`): For CLI agents -- Concise, LLM-optimized responses -- HTTP JSON-RPC request/response (streaming deferred; see MCP Implementation above) -- MCP protocol-specific formatting -- Long-lived API key authentication - -**Shared Core**: Both APIs use same query logic (`src/indexer/`, `src/db/`), different presentation layers. - -**Coordination**: OpenAPI spec (`docs/openapi.yaml`) defines REST API contract -- Frontend generates TypeScript types via `openapi-typescript` -- CI validates implementation matches spec -- Version-controlled for cross-repo coordination - -### Authentication & Authorization - -**API Key System:** - -```sql -CREATE TABLE api_keys ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - user_id uuid REFERENCES auth.users NOT NULL, - key_hash text NOT NULL UNIQUE, -- bcrypt hash - tier text NOT NULL CHECK (tier IN ('free', 'solo', 'team')), - org_id uuid REFERENCES organizations, -- nullable, team tier only - created_at timestamptz DEFAULT now(), - last_used_at timestamptz, - rate_limit_per_hour int NOT NULL, -- varies by tier - enabled boolean DEFAULT true -); -``` - -**Key Format**: `kota_<env>_<key>` (e.g., `kota_prod_a1b2c3d4e5f6`) -- Easy to identify, revoke, and environment-scope -- Backend validates via hash lookup, extracts `user_id` and `tier` -- Frontend can check tier for UI features (shared Supabase access) - -**Rate Limiting**: Enforced per tier (free: 100/hr, solo: 1000/hr, team: 10000/hr) - -### GitHub Integration - -**Decision: GitHub App** (not webhook secrets) - -**Why:** -- Fine-grained permissions (contents:read, webhooks:write only) -- Per-installation tokens automatically scoped to authorized repos -- Better UX (one-click install, select repos) -- Revocable without password changes -- Higher rate limits - -**Flow:** -1. User installs KotaDB GitHub App from kotadb.io -2. Selects repositories to grant access -3. Frontend receives `installation_id`, stores in Supabase -4. Backend generates installation tokens on-demand for cloning -5. 
Tokens auto-expire (1hr), regenerate as needed—never stored - -**Setup Requirements**: Register GitHub App manually (store `GITHUB_APP_ID`, `GITHUB_APP_PRIVATE_KEY` as secrets) - -### Indexing Strategy - -**Extraction Depth (Phase 1)**: Deep indexing from day one - -- 🔴 **Import/export statements** → dependency graphs (**Partial**: regex-based, needs AST - Epic 3) -- 🔴 **Function/class signatures** → symbol resolution (**Not Started** - Epic 3) -- 🔴 **Type definitions** (TS interfaces, types) → type relationships (**Not Started** - Epic 3) -- 🔴 **Docstrings/comments** (JSDoc, TSDoc) → semantic context (**Not Started** - Epic 3) -- 🔴 **Call graphs** (function invocations) → impact analysis (**Not Started** - Epic 3) - -**Parser**: Migrate from regex to `@typescript-eslint/parser` for robust AST parsing (**In Progress** - Epic 3, 30% complete) -- ✅ File discovery and basic content extraction (regex-based) -- 🔴 Extract symbols with positions (file, line, column) - **Blocked on Epic 3** -- 🔴 Store call sites, type references, property accesses - **Blocked on Epic 3** -- 🔴 Index docstrings separately for future semantic search - **Blocked on Epic 3** - -**Current Reality**: Using regex-based parsing for basic dependency extraction. Works for simple cases but fails on complex TypeScript syntax (JSX, destructuring, generics). **Epic 3 is the highest-priority gap blocking core value proposition.** - -**Job Queue**: pg-boss (Postgres-backed queue) **[NOT IMPLEMENTED - Epic 4]** -- Uses Supabase as job store—no Redis/external service needed -- Handles retries, exponential backoff, dead letter queues -- Simple API: `queue.send('index-repo', { repoId })` -- Worker: `queue.work('index-repo', async (job) => { ... })` - -**Current Reality**: All indexing runs synchronously in API handlers, blocking requests for 30s+ on large repos. **Epic 4 is critical for scalability and webhook support.** - -**Webhook Flow** (Planned - Epic 5): -``` -GitHub push → Webhook → pg-boss queue → Worker → Index repo → Update status → Notify frontend -``` - -**Current Status**: No webhook support. Users manually trigger indexing via API. 
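Since both the queue and the webhook receiver are still unimplemented, here is a minimal sketch of how the planned flow above could be wired, assuming Express (already used for the MCP transport), the pg-boss calls quoted above, and hypothetical names for the job payload and the `indexRepository` helper:

```typescript
// Sketch of the planned webhook → queue → worker flow (Epics 4 and 5).
// Assumptions: DATABASE_URL/GITHUB_WEBHOOK_SECRET env vars, the "index-repo"
// job name, and indexRepository() are illustrative, not confirmed names.
import { createHmac, timingSafeEqual } from "node:crypto";
import express from "express";
import PgBoss from "pg-boss";

const boss = new PgBoss(process.env.DATABASE_URL!); // pg-boss stores jobs in Postgres itself
await boss.start();

// Worker: mirrors the queue.work() snippet above. The handler shape varies
// across pg-boss versions (single job vs. batch), so treat this as a sketch.
await boss.work("index-repo", async (job: any) => {
  // await indexRepository(job.data.repoId); // hypothetical indexer entry point
});

const app = express();

// Raw body is required so the HMAC is computed over the exact bytes GitHub signed.
app.post("/webhooks/github", express.raw({ type: "application/json" }), async (req, res) => {
  const expected =
    "sha256=" +
    createHmac("sha256", process.env.GITHUB_WEBHOOK_SECRET!)
      .update(req.body)
      .digest("hex");
  const received = req.get("X-Hub-Signature-256") ?? "";
  const valid =
    expected.length === received.length &&
    timingSafeEqual(Buffer.from(expected), Buffer.from(received));
  if (!valid) return res.status(401).send("invalid signature");

  if (req.get("X-GitHub-Event") === "push") {
    const payload = JSON.parse(req.body.toString("utf8"));
    await boss.send("index-repo", { repoId: payload.repository?.full_name });
  }
  return res.status(202).send("queued");
});

app.listen(3000);
```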
- -## Technical Requirements - -### Real-Time Intelligence - -- Agents work quickly and need current information -- Low-latency query responses (< 200ms p95) -- Fresh index data synchronized with repository state - -### Auto-Indexing (Phase 1) - -- **Webhook-triggered indexing** on every push to tracked repositories -- Incremental updates to minimize reprocessing -- Status visibility for users (indexing progress, last indexed commit, health metrics) -- Queue-based job processing for reliability - -### Local Change Indexing (Future Phase) - -- Stage and index uncommitted local changes -- Agent-made modifications reflected in same session -- Diff-based incremental updates -- Ephemeral workspace management - -## Infrastructure & Deployment - -### Hosting & Services - -- **Backend (this repo)**: Fly.io container deployment -- **Frontend**: Cloudflare (kotadb.io, app.kotadb.io, develop.kotadb.io, app.develop.kotadb.io) -- **Database**: Supabase (shared between frontend and backend) -- **Authentication**: Supabase Auth -- **Payments**: Stripe -- **SMTP**: Resend - -### Environments - -| Environment | Branch | Backend Host | Frontend Hosts | Database | -|-------------|----------|--------------------|--------------------------------------------|-------------------| -| Production | `main` | Fly.io (prod app) | kotadb.io, app.kotadb.io | Supabase (prod) | -| Staging | `develop`| Fly.io (staging app)| develop.kotadb.io, app.develop.kotadb.io | Supabase (staging)| -| Feature | `feat/*` | Local/PR previews | N/A | Supabase Local | - -**Fly.io Setup**: Fresh deployment configuration -- Two separate apps: `kotadb-staging` (develop branch), `kotadb-prod` (main branch) -- Separate `fly.toml` configs per environment -- CI handles automated deployment on merge -- Health check endpoint (`/health`) for instance monitoring - -### Git Flow - -``` -feat/* → develop (staging) → main (production) -``` - -- **Feature branches** (`feat/*`): Development work, tested locally -- **Develop branch**: Staging environment for integration testing -- **Main branch**: Production releases only - -### CI/CD Requirements - -**Day 1 Robustness**: CI must support autonomous development (ADW workflows) with minimal human intervention. - -#### Pipeline Stages - -1. **Validation** (all branches) - - `bun run lint` - - `bun run typecheck` - - `bun test` - - `bun run build` - - Docker image build verification - -2. **Database Migrations** (develop, main) - - Automated migration application - - Rollback scripts generated and tested - - Migration history tracking in Supabase - -3. **Deployment** (develop, main) - - Fly.io deployment with health checks - - Blue-green or rolling deployment strategy - - Automatic rollback on health check failure - -4. **Infrastructure Updates** (develop, main) - - Environment variable synchronization - - Secrets rotation support - - DNS/routing updates (if needed) - -#### Rollback Strategy - -- Database migrations must be reversible (up/down migrations) -- Fly.io releases tagged and rollback-ready -- CI generates rollback runbook per deployment -- Manual approval gate for production migrations (optional, configurable) - -### Secrets Management - -**Approach**: Local SSOT with scripted sync (no secrets in CI) - -**Process:** -1. Maintain local "single source of truth" files (gitignored): - - `.env.local.secrets` - - `.env.staging.secrets` - - `.env.prod.secrets` - -2. 
Sync scripts push secrets to services: - - `scripts/sync-secrets-staging.sh` → Fly.io + Supabase - - `scripts/sync-secrets-prod.sh` → Fly.io + Supabase - - Uses `flyctl secrets import` and Supabase CLI - -3. CI never accesses secrets—only deploys code - -**Required Secrets per Environment:** -- `SUPABASE_URL` -- `SUPABASE_SERVICE_KEY` (bypasses RLS for admin operations) -- `SUPABASE_ANON_KEY` (public, RLS-enforced) -- `GITHUB_APP_ID` -- `GITHUB_APP_PRIVATE_KEY` -- `STRIPE_SECRET_KEY` (frontend only, but documented here) -- `RESEND_API_KEY` (frontend only, but documented here) - -### Monitoring & Observability - -**Tools**: Use built-in services, avoid new dependencies - -1. **Structured Logging** (bun:logger) - - JSON-formatted logs to stdout/stderr - - Include correlation IDs: `request_id`, `user_id`, `job_id` - - Fly.io captures logs automatically - - Query with `flyctl logs` - -2. **Fly.io Metrics** (built-in dashboard) - - Request latency (p50, p95, p99) - - Error rates (4xx, 5xx) - - Instance health and CPU/memory usage - - Alerts for downtime or threshold breaches - -3. **Supabase Logs** (built-in dashboard) - - Slow query detection (> 1s) - - Connection pool saturation - - Database error rates - -4. **Health Checks** (`/health` endpoint) - - Returns 200 OK if service is healthy - - Fly.io polls every 30s, restarts unhealthy instances - - Checks: Database connection, job queue health - -**Future**: Add Sentry (error tracking) or Grafana (custom dashboards) if needed, both have free tiers. - -### Testing Strategy - -**Critical for Autonomous Development**: ADW workflows will implement features without human review until PR stage. - -#### Test Pyramid - -1. **Unit Tests** (70% coverage minimum) - - All indexer logic (parsers, extractors, dependency resolution) - - API query functions - - Database schema helpers - - MCP server protocol handlers - -2. **Integration Tests** (key workflows) - - Full indexing pipeline (clone → parse → extract → store) - - MCP request/response cycles - - Webhook processing and job queuing - - Supabase integration (auth, queries) - -3. **E2E Tests** (smoke tests for critical paths) - - Repository indexing end-to-end - - MCP client queries returning correct results - - Frontend → Backend status updates - -4. 
**Contract Tests** (for MCP protocol) - - Validate MCP spec compliance - - Version compatibility checks - -#### Test Data & Fixtures - -- Curated test repositories with known dependency graphs -- Mock GitHub webhook payloads -- Supabase test database seeding scripts -- MCP client simulator for protocol validation - -#### CI Test Enforcement - -- All tests must pass before merge -- Coverage gates per test tier -- Performance regression detection (query latency benchmarks) - -## Current Scope (Phase 1) - -**Progress**: ~60% complete (see ROADMAP.md for detailed status) - -### Infrastructure & Foundation -- [x] Supabase schema design and migration from SQLite **[Epic 1: 95% complete]** - - [x] Core tables: users, api_keys, organizations, user_organizations - - [x] Repository management: repositories, index_jobs - - [x] Code intelligence: indexed_files, symbols, references, dependencies - - [x] RLS policies for multi-tenancy - - [x] Up/down migration scripts - -- [ ] Fly.io deployment setup **[Epic 9: Not started]** - - [ ] Create `kotadb-staging` and `kotadb-prod` apps - - [ ] Environment-specific `fly.toml` configurations - - [x] Health check endpoint integration - - [ ] Secrets sync scripts (`scripts/sync-secrets-*.sh`) - -- [x] CI/CD pipeline **[Epic 9: 40% complete]** - - [x] GitHub Actions workflow (lint, typecheck, test, build) - - [ ] Automated migrations on deploy (with rollback) - - [x] Branch-based testing (feat → develop → main) - - [x] Docker image build verification - -### API & Authentication -- [x] API key system **[Epic 2: 90% complete]** - - [x] Key generation and hashing (bcrypt) - - [x] Tier-based rate limiting middleware - - [x] Authentication middleware for REST and MCP - -- [x] REST API refinements **[Epic 6: 70% complete]** - - [x] OpenAPI spec (`docs/openapi.yaml`) - skeleton exists, needs sync - - [x] Migrate existing endpoints to Supabase - - [ ] Add repository management endpoints - - [ ] Job status polling endpoints (blocked by Epic 4) - -- [x] MCP server implementation **[Epic 7: 95% complete]** - - [x] HTTP JSON-RPC transport layer (`/mcp`) - **Using HTTP instead of SSE** - - [x] Protocol handlers (handshake, tool discovery, execution) - - [x] Three MVP tools: `search_code`, `index_repository`, `list_recent_files` - - [x] MCP authentication via API keys - - [ ] `search_dependencies` tool (blocked by Epic 3) - - [ ] `find_references` tool (blocked by Epic 3) - -### Indexing & Intelligence -- [ ] GitHub App integration **[Epic 5: 0% complete - MVP BLOCKER]** - - [ ] App registration documentation - - [ ] Installation token generation - - [ ] Webhook receiver (`POST /webhooks/github`) - - [ ] Signature verification - -- [ ] Job queue with pg-boss **[Epic 4: 0% complete - MVP BLOCKER]** - - [ ] Queue setup and worker configuration - - [ ] Retry logic and dead letter handling - - [ ] Job status updates for frontend - -- [ ] Deep indexing pipeline **[Epic 3: 30% complete - MVP BLOCKER]** - - [ ] Migrate to `@typescript-eslint/parser` (currently regex-based) - - [ ] Extract symbols (functions, classes, types, exports) - - [ ] Extract references (imports, calls, property accesses) - - [ ] Extract dependencies (file→file, symbol→symbol edges) - - [ ] Extract docstrings/comments - - [x] File discovery and basic content extraction (regex-based) - -### Testing & Quality -- [x] Comprehensive test suite **[Epic 10: 85% complete]** - - [x] Unit tests (85% coverage): indexer, API, parsers - - [x] Integration tests: MCP protocol, API endpoints, auth/rate limiting - - [ ] E2E 
tests: end-to-end indexing and query workflows - [ ] Contract tests: OpenAPI spec validation - -- [x] Test infrastructure **[Epic 10: 85% complete]** - - [x] Test repository fixtures with known graphs - - [x] Mock GitHub webhook payloads (fixtures created) - - [x] Supabase test database setup (Supabase Local, antimocking enforced) - - [x] MCP test helpers and utilities - -### Monitoring & Operations -- [ ] Structured logging with bun:logger **[Epic 8: 15% complete]** - - [ ] JSON log format with correlation IDs - - [ ] Request/response logging - - [x] Error logging with context (basic) - -- [x] Health monitoring **[Epic 8: Partially complete]** - - [x] `/health` endpoint (DB connection check) - - [ ] Fly.io metrics dashboard setup (pending deployment) - - [ ] Alert configuration for critical failures - -## Out of Scope (Future Phases) - -- Local change staging/indexing -- Advanced AST analysis beyond dependencies -- Multi-language symbol resolution (start with TS/JS only) -- Real-time collaboration features -- Self-hosted deployment options -- Advanced caching strategies (CDN, query result caching) - -## Success Metrics - -- **Latency**: p95 query response time < 200ms -- **Accuracy**: 95%+ precision on dependency analysis -- **Reliability**: 99.5% uptime for MCP endpoints -- **Autonomy**: 80%+ of ADW-generated PRs pass CI without human intervention -- **User Adoption**: MCP integration success rate (users who configure and successfully use KotaDB) - -## Implementation Priority - -**Phase 1A: Foundation** (Weeks 1-2) -1. Supabase schema design and migration (Epic 1) -2. Authentication + API keys (Epic 2) -3. Establish baseline testing harness (Epic 10 kick-off) - -**Phase 1B: Indexing Core** (Weeks 3-4) -1. Enhanced parsing pipeline (Epic 3) -2. Job queue + worker orchestration (Epic 4) -3. Expand unit tests around parsing/indexing (Epic 10) - -**Phase 1C: Integrations & API** (Weeks 5-6) -1. GitHub App integration & webhook flow (Epic 5) -2. REST API migration + OpenAPI contract (Epic 6) - -**Phase 1D: MCP & Operations** (Weeks 7-8) -1. MCP transport, protocol handlers, tools (Epic 7) -2. Monitoring & operational readiness (Epic 8) - -**Phase 1E: Launch Readiness** (Week 9) -1. CI/CD automation and deployment scripts (Epic 9) -2. Test hardening + E2E coverage (Epic 10) - -## Key Dependencies - -**New Packages to Add:** -- `pg-boss` - PostgreSQL-backed job queue -- `@supabase/supabase-js` - Supabase client -- `@modelcontextprotocol/sdk` - Official MCP SDK for the HTTP JSON-RPC transport -- `@typescript-eslint/parser` - AST parsing for TS/JS -- `@typescript-eslint/types` - TypeScript AST type definitions -- `bcryptjs` - API key hashing -- `@octokit/rest` - GitHub API client -- `openapi-typescript` - OpenAPI type generation (dev dependency) - -**MCP Protocol:** -- HTTP JSON-RPC (Streamable HTTP) transport per the [MCP specification](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports) (see MCP Implementation above) -- Implemented with the official `@modelcontextprotocol/sdk` rather than building directly against the spec - -**Existing Stack (Keep):** -- Bun runtime -- TypeScript -- Biome (linting) - -## Dependencies & Coordination - -**Frontend Team**: -- Supabase schema review and approval -- OpenAPI spec review for REST endpoints -- GitHub App installation flow UX -- API key management UI -- Repository selection and status polling - -**External Services Setup**: -- GitHub App registration (manual, one-time) -- Supabase project creation (staging + prod) -- Fly.io app creation (staging + prod) -- Stripe integration (frontend-owned, backend aware of tiers) - ---- - -**Working Document**: This vision will evolve as we implement. 
Update this file when scope, architecture, or infrastructure decisions change. diff --git a/docs/vision/archive/manifesto.md b/docs/vision/archive/manifesto.md deleted file mode 100644 index 09947081..00000000 --- a/docs/vision/archive/manifesto.md +++ /dev/null @@ -1,327 +0,0 @@ -# The Future of Software is Autonomous Collaboration - -**A Manifesto for Agentic Engineering** - ---- - -## The Inflection Point - -We're living through the most significant shift in software development since the introduction of version control. For decades, we've built tools to make *humans* more productive: IDEs, linters, CI/CD pipelines, static analysis. We optimized for human speed, human memory, human cognition. - -**That era is over.** - -AI agents don't need syntax highlighting. They don't need code review checklists. They don't need Jira tickets or sprint planning meetings. They need something fundamentally different: **infrastructure for coordination, memory, and collaboration.** - -The question isn't "Can AI write code?" (it already does). The question is: **"How do we build systems where dozens of specialized AI agents coordinate to ship production software?"** - -This is the question KotaDB answers. - ---- - -## The Problem: Agents Don't Talk to Each Other - -Right now, if you want to use AI for software development, you have two options: - -### Option 1: Single Agent Systems -Use Claude Code, Cursor, or Copilot. One agent, one task at a time. Want to classify an issue? Ask Claude. Want to implement it? Ask Claude again. Want to review the code? Ask Claude a third time. - -**The problem**: You're the orchestrator. You're the memory. You're the state machine. The agent is a stateless function call. Every prompt starts from scratch. - -### Option 2: Custom Orchestration -Build your own system. Write Python scripts that call the Claude API. Chain agents together with subprocess calls. Persist state in JSON files. Manage git operations manually. Debug mysterious failures when agents conflict. - -**The problem**: You've just built a worse version of Kubernetes for agents. You're spending more time managing infrastructure than building features. - ---- - -## The Missing Layer: Multi-Agent Infrastructure - -What we need—what doesn't exist yet—is a **platform layer** for autonomous software development. The equivalent of Kubernetes for containers, but for AI agents. - -This platform must provide: - -### 1. **Standardized Communication** -Agents need a common language. Not REST APIs. Not GraphQL. Not bespoke JSON schemas. A **protocol** that works across LLM providers, tools, and custom agents. - -*This is what MCP (Model Context Protocol) provides.* - -### 2. **Resource Isolation** -When five agents work on the same codebase simultaneously, they need **isolated workspaces**. Not branches (too coarse). Not in-memory state (too fragile). Something that preserves git semantics while preventing conflicts. - -*This is what git worktrees provide.* - -### 3. **Persistent Memory** -Agents need to remember what they've done. Not just "I wrote a plan," but **where** the plan lives, **what** the next phase requires, **who** is responsible for executing it. - -*This is what state management provides.* - -### 4. **Security & Multi-Tenancy** -When agents access sensitive codebases or proprietary data, they need **authentication, rate limiting, and isolation**. Row-level security. Audit logs. The same guarantees we expect from production systems. - -*This is what Supabase + RLS provides.* - -### 5. 
**Workflow Orchestration** -Agents need to coordinate on complex, multi-phase workflows. Plan → Implement → Test → Review → Document. Each phase might use different agents. Failures need to retry. State needs to persist across phases. - -*This is what ADW (AI Developer Workflows) provides.* - ---- - -## The Vision: KotaDB as Infrastructure Layer - -**KotaDB is not a code search tool.** Code search is the *memory layer* that enables agents to understand codebases. The real product is the **orchestration infrastructure** that coordinates autonomous development workflows. - -Think of it this way: - -``` -GitHub = where humans collaborate on code -KotaDB = where agents collaborate on code - -Docker = how humans package applications -Agent Tools = how agents expose capabilities - -Kubernetes = how humans orchestrate containers -KotaDB ADW = how agents orchestrate workflows -``` - ---- - -## What This Looks Like in Practice - -Imagine this workflow: - -1. **User**: Creates GitHub issue: "Add rate limiting to /api/search endpoint" - -2. **Classifier Agent** (via KotaDB MCP): - - Calls `kotadb.search_code("rate limiting")` to find similar patterns - - Returns classification: `/feature` - -3. **Planner Agent** (via KotaDB MCP): - - Calls `kotadb.index_repository()` to refresh codebase context - - Calls `kotadb.search_code("middleware authentication")` to understand auth patterns - - Calls `kotadb.git_create_worktree("feat-rate-limit")` to get isolated workspace - - Writes plan to `docs/specs/feat-rate-limit.md` - - Calls `kotadb.git_commit()` to save plan - - Returns: `plan_file: "docs/specs/feat-rate-limit.md"` - -4. **Implementor Agent** (via KotaDB MCP): - - Reads plan from worktree - - Calls `kotadb.search_code("rate limit redis")` to find implementation examples - - Writes code: `app/src/middleware/rate-limit.ts` - - Calls `kotadb.git_commit()` to save implementation - -5. **Validator Agent** (via KotaDB MCP): - - Calls `kotadb.bun_validate()` to run lint, typecheck, tests - - Detects failure: "Type error in rate-limit.ts line 42" - - Returns feedback to Implementor Agent - -6. **Implementor Agent** (retry): - - Fixes type error based on feedback - - Calls `kotadb.git_commit()` to save fix - - Calls `kotadb.bun_validate()` again - - All checks pass ✅ - -7. **Reviewer Agent** (via KotaDB MCP): - - Calls `kotadb.search_code("rate limit test")` to verify test coverage - - Reads implementation from worktree - - Analyzes against plan - - Returns: `status: approved, blockers: []` - -8. **Documenter Agent** (via KotaDB MCP): - - Updates README.md with rate limiting documentation - - Calls `kotadb.git_commit()` to save docs - - Calls `kotadb.git_push_branch("feat-rate-limit")` to publish - -9. **PR Creator Agent** (via GitHub CLI): - - Creates pull request with summary - - Links to original issue - - Tags for human review - -**Total time**: 4 minutes. **Human intervention**: Zero (until PR review). - -This workflow is **impossible** with today's tools. You'd need custom glue code, manual state management, and brittle subprocess orchestration. **KotaDB makes it trivial.** - ---- - -## The Principles - -### 1. **Determinism + Creativity** -Agents bring creativity (LLM reasoning). Infrastructure brings determinism (predictable execution, reliable state, consistent APIs). Together, they produce **reliable autonomous systems**. - -### 2. **Composability Over Monoliths** -Don't build one super-agent that does everything. 
Build **specialized agents** (classifier, planner, implementor, reviewer) and compose them via workflows. Unix philosophy for AI. - -### 3. **Standards Over Silos** -Use MCP for communication. Use git for version control. Use standard databases for persistence. Don't invent new protocols. **Standardize on battle-tested infrastructure.** - -### 4. **Production-Grade, Not Prototypes** -Real authentication. Real rate limiting. Real error handling. Real logging. Real tests. If you wouldn't deploy it to production for human users, don't deploy it for agents. - -### 5. **Multi-Vendor by Default** -No lock-in. Claude for planning. OpenAI for implementation. Custom agents for security. Google for documentation. KotaDB coordinates them all. **Switzerland, not walled garden.** - ---- - -## The Moat - -Why is this hard? Why hasn't someone else built this? - -### 1. **Security is Hard** -Most MCP servers have no authentication (research found ~2,000 exposed servers). KotaDB has tier-based auth, rate limiting, and row-level security from day one. - -### 2. **Multi-Tenancy is Hard** -Isolating agents from each other requires deep understanding of databases, git semantics, and state management. Supabase RLS + worktrees + persistent state is a non-trivial combination. - -### 3. **Production is Hard** -Running one agent on a demo repo is easy. Running 65+ autonomous workflows on a real codebase with tests, CI/CD, and human collaboration is hard. KotaDB has done this. - -### 4. **Workflows are Hard** -Coordinating multi-phase SDLC workflows with retry logic, state persistence, and error handling requires deep software engineering expertise. Most AI companies don't have this DNA. - -**KotaDB has all four.** That's the moat. - ---- - -## The Market - -### Who Needs This? - -**Agentic Engineering Early Adopters** (today): -- Using Claude Code, Cursor, Copilot daily -- Frustrated by single-agent limitations -- Building custom orchestration scripts -- **Need**: Platform to coordinate multiple agents - -**Platform Engineering Teams** (6-12 months): -- Building internal developer platforms -- Standardizing on AI tooling -- Seeking self-hosted solutions -- **Need**: Infrastructure layer for agent workflows - -**AI-Native Startups** (12-24 months): -- Entire codebases managed by agents -- Minimal human engineering teams -- High tolerance for cutting-edge tech -- **Need**: Production-grade orchestration at scale - -### Market Size - -**TAM (Total Addressable Market)**: -- 31M software developers worldwide -- Average $100k/year salary -- **$3.1 trillion in developer productivity** - -**SAM (Serviceable Addressable Market)**: -- 10% early adopters (3.1M developers) -- $50/month average (solo + team tiers) -- **$1.86 billion annual** - -**SOM (Serviceable Obtainable Market)**: -- 0.1% market share (3,100 customers) -- $75/month average revenue per user -- **$2.79 million ARR** - -This is achievable within 18 months given: -- First-mover advantage -- Proven production usage -- Open-source community -- Strategic partnerships (Anthropic, Cursor, GitHub) - ---- - -## The Competition - -### What They're Building vs. 
What We're Building - -**LangChain, AutoGPT, CrewAI**: -- General-purpose agent frameworks -- Focus on RAG, chatbots, research agents -- Weak on software development workflows -- **We win**: Specialized for software development, production-grade - -**GitHub Copilot Workspace, Cursor**: -- Single-agent IDEs -- Monolithic architectures -- Closed ecosystems -- **We win**: Multi-agent, composable, open standards (MCP) - -**Replit Agent, Vercel v0**: -- End-to-end code generation -- Focus on greenfield projects -- Limited collaboration primitives -- **We win**: Brownfield support, SDLC workflows, agent coordination - -**CI/CD Platforms (GitHub Actions, CircleCI)**: -- YAML-driven automation -- No AI-native workflows -- Sequential execution -- **We win**: Autonomous decision-making, concurrent agents, self-correction - -**No one is building multi-agent infrastructure for software development.** This category doesn't exist yet. We're defining it. - ---- - -## The Timeline - -### Phase 1: Framework Core (Months 1-2) -**Goal**: Expose ADW capabilities via MCP - -- Build ADW MCP server (workflow orchestration tools) -- Create agent registry (catalog of available agents) -- Update documentation (framework-centric messaging) -- **Milestone**: 10 external agents registered, 100 workflows executed - -### Phase 2: Developer Experience (Months 3-4) -**Goal**: Make it trivial to build custom agents - -- Launch KotaDB CLI (agent management, workflow execution) -- Publish agent templates (Python, TypeScript, Rust) -- Build collaboration primitives (agent-to-agent messaging, resource locking) -- **Milestone**: 100 agents published, 1,000 workflows/month - -### Phase 3: Enterprise Platform (Months 5-6) -**Goal**: Production-ready for enterprise customers - -- Self-hosted deployment (Docker Compose, air-gapped) -- Agent marketplace (public registry, usage analytics) -- Compliance tooling (audit logs, SOC2 certification) -- **Milestone**: 10 enterprise customers, $100k MRR - ---- - -## The Call to Action - -**To Developers**: Stop building single-agent toys. Build multi-agent systems. KotaDB gives you the infrastructure. - -**To Companies**: Stop hiring more engineers to write boilerplate. Hire agents. KotaDB coordinates them. - -**To Investors**: This is the infrastructure layer for the next generation of software development. GitHub was $7.5B. Kubernetes changed the world. **KotaDB is the platform for autonomous development.** - ---- - -## The Future We're Building - -Five years from now, we'll look back at 2025 as the year software development fundamentally changed. The year we stopped *writing* code and started *orchestrating* agents to write it for us. - -The companies that win won't be the ones with the best LLMs. They'll be the ones with the best **infrastructure for agent collaboration**. The ones who figured out how to coordinate dozens of specialized agents to ship production software at scale. - -**That company is KotaDB.** - -We're not building a code search tool. We're building the operating system for autonomous software development. We're building the platform where the next million software projects will be built—not by humans, but by fleets of coordinated AI agents. - -The future of software is autonomous collaboration. 
- -**The future is KotaDB.** - ---- - -*"The best way to predict the future is to build it."* -— Alan Kay - ---- - -**Join us**: [kotadb.dev](https://kotadb.dev) (coming soon) -**Contribute**: [github.com/jayminwest/kota-db-ts](https://github.com/jayminwest/kota-db-ts) -**Discuss**: Discord (coming soon) - -*Written October 13, 2025 by the KotaDB team* diff --git a/docs/vision/epic-1-database-foundation.md b/docs/vision/epic-1-database-foundation.md deleted file mode 100644 index e4d70599..00000000 --- a/docs/vision/epic-1-database-foundation.md +++ /dev/null @@ -1,325 +0,0 @@ -# Epic 1: Database Foundation & Schema - -> **Reference Document**: This epic was from original planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis. - -**Status**: ✅ 95% Complete (Production-Ready) -**Priority**: Critical (Blocks most other work) -**Estimated Duration**: 1-2 weeks -**Actual Duration**: ~2 weeks (completed October 2025) - -## Overview - -Establish the Supabase PostgreSQL database schema, migration system, and client configuration. This is the foundational epic that most other work depends on. - -## Completion Status - -**Overall**: ✅ 95% complete - Database schema implemented, migrations working, RLS policies active - -**Key Achievements**: -- 10-table schema (users, api_keys, organizations, repositories, index_jobs, indexed_files, symbols, references, dependencies, rate_limit_counters) -- Row Level Security (RLS) for multi-tenant isolation -- Migration system with sync between `app/src/db/migrations/` and `app/supabase/migrations/` -- `increment_rate_limit()` database function for atomic counter updates -- Integration tests use real Supabase Local (antimocking compliance) - -**Remaining Work**: -- 🟡 Index optimization for hot query paths (minor performance tuning) -- 🟡 Migration sync validation in CI (minor automation) - -## Issues - -### Issue #1: Design and implement Supabase schema - -**Priority**: P0 (Critical) -**Depends on**: None -**Blocks**: All other database work - -#### Description -Design and implement the complete Supabase schema with 8 core tables, foreign key relationships, indexes, and Row Level Security (RLS) policies. - -#### Acceptance Criteria -- [ ] All tables created with proper column types and constraints -- [ ] Foreign key relationships established -- [ ] Indexes on frequently queried columns (user_id, repo_id, symbol_name, etc.) 
- -- [ ] RLS policies implemented for all tables - -- [ ] Supporting database function for rate limit increments deployed - -- [ ] Documentation of schema relationships and design decisions - -#### Tables to Create - -**Core tables:** -```sql --- Managed by Supabase Auth (reference only) -auth.users (id, email, created_at) - --- Custom tables -api_keys ( - id uuid PRIMARY KEY, - user_id uuid REFERENCES auth.users, - key_id text UNIQUE NOT NULL, -- public identifier embedded in key prefix - secret_hash text NOT NULL, -- bcrypt hash of secret portion - tier text CHECK (tier IN ('free', 'solo', 'team')), - org_id uuid REFERENCES organizations, - rate_limit_per_hour int NOT NULL, - created_at timestamptz, - last_used_at timestamptz, - enabled boolean DEFAULT true -) - -organizations ( - id uuid PRIMARY KEY, - name text NOT NULL, - created_at timestamptz, - owner_id uuid REFERENCES auth.users -) - -user_organizations ( - user_id uuid REFERENCES auth.users, - org_id uuid REFERENCES organizations, - role text CHECK (role IN ('owner', 'admin', 'member')), - PRIMARY KEY (user_id, org_id) -) - -rate_limit_counters ( - key_id text PRIMARY KEY, - window_start timestamptz NOT NULL, - request_count int NOT NULL, - updated_at timestamptz DEFAULT now() -) - --- Helper to increment counters atomically (exposed via Supabase RPC) -CREATE OR REPLACE FUNCTION increment_rate_limit( - p_key_id text, - p_window_start timestamptz -) -RETURNS TABLE (request_count int, reset_at timestamptz) AS $$ -BEGIN - RETURN QUERY - INSERT INTO rate_limit_counters AS r (key_id, window_start, request_count, updated_at) - VALUES (p_key_id, p_window_start, 1, now()) - ON CONFLICT (key_id) - DO UPDATE SET - request_count = CASE - WHEN r.window_start = EXCLUDED.window_start THEN r.request_count + 1 - ELSE 1 - END, - window_start = CASE - WHEN r.window_start = EXCLUDED.window_start THEN r.window_start - ELSE EXCLUDED.window_start - END, - updated_at = now() - RETURNING r.request_count, - (r.window_start + interval '1 hour') AS reset_at; -END; -$$ LANGUAGE plpgsql; -``` - -**Repository management:** -```sql -repositories ( - id uuid PRIMARY KEY, - user_id uuid REFERENCES auth.users, - org_id uuid REFERENCES organizations, - full_name text NOT NULL, -- "owner/repo" - installation_id bigint, -- GitHub App installation ID - default_branch text, - last_indexed_at timestamptz, - last_indexed_commit text, - created_at timestamptz -) - -index_jobs ( - id uuid PRIMARY KEY, - repository_id uuid REFERENCES repositories, - status text CHECK (status IN ('pending', 'processing', 'completed', 'failed', 'skipped')), - commit_sha text, - started_at timestamptz, - completed_at timestamptz, - error_message text, - stats jsonb -- { files_processed, symbols_extracted, etc. 
} -) -``` - -**Code intelligence:** -```sql -indexed_files ( - id uuid PRIMARY KEY, - repository_id uuid REFERENCES repositories, - path text NOT NULL, - content text, - content_hash text, - language text, - size_bytes int, - indexed_at timestamptz, - UNIQUE (repository_id, path) -) - -symbols ( - id uuid PRIMARY KEY, - repository_id uuid REFERENCES repositories, - file_id uuid REFERENCES indexed_files, - name text NOT NULL, - kind text CHECK (kind IN ('function', 'class', 'interface', 'type', 'variable', 'const', 'export')), - line_start int, - line_end int, - column_start int, - column_end int, - signature text, - docstring text, - is_exported boolean DEFAULT false -) - -references ( - id uuid PRIMARY KEY, - repository_id uuid REFERENCES repositories, - symbol_id uuid REFERENCES symbols, - caller_file_id uuid REFERENCES indexed_files, - caller_line int, - caller_column int, - reference_type text CHECK (reference_type IN ('import', 'call', 'property_access', 'type_reference')) -) - -dependencies ( - id uuid PRIMARY KEY, - repository_id uuid REFERENCES repositories, - from_file_id uuid REFERENCES indexed_files, - to_file_id uuid REFERENCES indexed_files, - from_symbol_id uuid REFERENCES symbols, - to_symbol_id uuid REFERENCES symbols, - dependency_type text CHECK (dependency_type IN ('file_import', 'symbol_usage')) -) -``` - -#### RLS Policies -- Users can only see their own data (via `user_id` match) -- Team members can see org data (via `user_organizations` join) -- API key validation extracts `user_id` for RLS enforcement - -#### Technical Notes -- Use `uuid` for primary keys (Supabase default) -- Use `timestamptz` for all timestamps -- Use `jsonb` for flexible metadata (job stats, etc.) -- Create indexes on: `user_id`, `repository_id`, `symbol_id`, `file_id`, `name`, `api_keys.key_id` -- Store only hashed secrets (`secret_hash`) in `api_keys` -- Expose `increment_rate_limit` as a Supabase RPC function for rate limiting -- Enable full-text search on `symbols.name`, `symbols.docstring`, `indexed_files.content` - -#### Files to Create -- `src/db/schema.sql` - Complete schema definition -- `src/db/migrations/001_initial_schema.sql` - Initial migration -- `src/db/functions/increment_rate_limit.sql` - Postgres function backing rate limit RPC -- `docs/schema.md` - Schema documentation with ERD - ---- - -### Issue #2: Set up Supabase client and connection pooling - -**Priority**: P0 (Critical) -**Depends on**: #1 -**Blocks**: All database queries - -#### Description -Configure Supabase JavaScript client with proper connection management, environment-based configuration, and error handling. 
- -#### Acceptance Criteria - -- [ ] Supabase client initialized with service role key (bypasses RLS for admin operations) - -- [ ] Environment variables for `SUPABASE_URL`, `SUPABASE_SERVICE_KEY`, `SUPABASE_ANON_KEY` - -- [ ] Connection health check function - -- [ ] Automatic retry logic for transient errors - -- [ ] Typed client wrapper for type safety - -#### Technical Notes - -- Use `@supabase/supabase-js` package - -- Service role key for backend operations (admin) - -- Anon key for RLS-enforced operations (user context) - -- Connection pooling handled by Supabase automatically - -#### Files to Create - -- `src/db/client.ts` - Supabase client initialization - -- `src/db/health.ts` - Connection health checks - -- `.env.sample` - Updated with Supabase variables - -#### Example Implementation -```typescript -import { createClient } from '@supabase/supabase-js' -import type { Database } from './types' // Generated from schema - -const supabaseUrl = process.env.SUPABASE_URL! -const supabaseServiceKey = process.env.SUPABASE_SERVICE_KEY! - -export const supabase = createClient<Database>(supabaseUrl, supabaseServiceKey) - -export async function checkDatabaseHealth() { - const { error } = await supabase.from('repositories').select('id').limit(1) - return !error -} -``` - ---- - -### Issue #3: Implement migration system with rollback - -**Priority**: P1 (High) -**Depends on**: #1 -**Blocks**: CI/CD deployment (#32) - -#### Description -Build a migration system that tracks applied migrations, supports rollback, and integrates with CI/CD for automated deployment. - -#### Acceptance Criteria - -- [ ] `migrations` table to track applied migrations - -- [ ] Migration runner script (apply pending migrations) - -- [ ] Rollback script (undo last migration) - -- [ ] Up/down migration file format - -- [ ] Dry-run mode for testing - -- [ ] CI integration (migrate before deploy) - -#### Technical Notes - -- Store migrations in `src/db/migrations/` as numbered files - -- Track applied migrations in `migrations` table - -- Each migration has `up.sql` and `down.sql` - -- Fail fast if rollback is not possible - -- Generate migration stubs with CLI tool - -#### Files to Create - -- `src/db/migrate.ts` - Migration runner - -- `src/db/rollback.ts` - Rollback script - -- `scripts/migrate.sh` - Shell wrapper for CI - -- `scripts/create-migration.sh` - Generate migration stub - -#### Migration Table -```sql -CREATE TABLE migrations ( - id serial PRIMARY KEY, - name text UNIQUE NOT NULL, - applied_at timestamptz DEFAULT now() -); -``` - -#### Example Migration File -``` -src/db/migrations/ - 001_initial_schema.sql - 001_initial_schema_rollback.sql - 002_add_indexes.sql - 002_add_indexes_rollback.sql -``` - ---- - -## Success Criteria - -- [ ] All 8 tables exist in Supabase with proper relationships - -- [ ] RLS policies prevent unauthorized data access - -- [ ] Supabase client connects successfully in all environments - -- [ ] Migration system can apply and rollback changes - -- [ ] Schema documentation is complete and accurate - -## Dependencies for Other Epics - -This epic must be completed before: - -- Epic 2 (needs `api_keys` table) -- Epic 3 (needs code intelligence tables) -- Epic 4 (needs `index_jobs` table) -- Epic 5 (needs `repositories` table) -- Epic 6 (needs all tables for REST API) -- Epic 7 (needs all tables for MCP queries) diff --git a/docs/vision/epic-10-testing.md b/docs/vision/epic-10-testing.md deleted file mode 100644 index d32be8c1..00000000 --- a/docs/vision/epic-10-testing.md +++ /dev/null @@ -1,462 +0,0 @@ -# Epic 10: Comprehensive Testing - -> **Reference 
Document**: This epic was from original planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis. - -**Status**: ✅ 88% Complete (Strong Coverage, Standardized Environment) -**Priority**: Critical (Enables autonomous development) -**Estimated Duration**: Ongoing (parallel with all other epics) -**Actual Progress**: 317 tests passing, antimocking philosophy enforced, MCP regression suite complete. Remaining: E2E tests, performance regression tests, OpenAPI contract tests. - -## Overview - -Build comprehensive test suite with 70%+ coverage. Critical for ADW workflows that implement features without human review until PR stage. - -## Issues - -### Issue #34: Unit tests for parsing and extraction - -**Priority**: P0 (Critical) -**Depends on**: #7 (test infrastructure), #8-11 (parsing pipeline) -**Blocks**: #32 (CI/CD requires passing tests) - -#### Description -Write unit tests for AST parsing, symbol extraction, reference extraction, and dependency graph building. - -#### Acceptance Criteria -- [ ] Test AST parser with valid and invalid syntax -- [ ] Test symbol extraction for all symbol types -- [ ] Test reference extraction for imports, calls, property accesses -- [ ] Test dependency graph building and circular detection -- [ ] 70%+ code coverage for `src/indexer/` modules -- [ ] Tests use fixture repositories -- [ ] All tests pass in CI - -#### Technical Notes -- Use Bun's built-in test runner -- Mock file system for parser tests -- Use known fixture repos with expected outputs -- Test edge cases: anonymous functions, default exports, type references - -#### Files to Create -- `tests/indexer/ast-parser.test.ts` -- `tests/indexer/symbol-extractor.test.ts` -- `tests/indexer/reference-extractor.test.ts` -- `tests/indexer/dependency-extractor.test.ts` - -#### Example Test -```typescript -import { describe, test, expect } from 'bun:test' -import { parseFile } from '@indexer/ast-parser' -import { extractSymbols } from '@indexer/symbol-extractor' - -describe('Symbol Extraction', () => { - test('extracts function declarations', () => { - const code = ` - function hello(name: string): string { - return 'Hello ' + name - } - ` - - const ast = parseFile('test.ts', code) - const symbols = extractSymbols(ast!, 'file-id') - - expect(symbols).toHaveLength(1) - expect(symbols[0].name).toBe('hello') - expect(symbols[0].kind).toBe('function') - expect(symbols[0].signature).toContain('name: string') - }) - - test('extracts class declarations with methods', () => { - const code = ` - class User { - constructor(public name: string) {} - greet() { return 'Hello' } - } - ` - - const ast = parseFile('test.ts', code) - const symbols = extractSymbols(ast!, 'file-id') - - const classSymbol = symbols.find((s) => s.kind === 'class') - expect(classSymbol?.name).toBe('User') - }) - - test('handles syntax errors gracefully', () => { - const code = 'const x = {' // Incomplete syntax - - const ast = parseFile('test.ts', code) - expect(ast).toBeNull() // Should not throw, just return null - }) -}) -``` - ---- - -### Issue #35: Unit tests for API and auth - -**Priority**: P0 (Critical) -**Depends on**: #7 (test infrastructure), #5-6 (auth), #20-22 (REST API) -**Blocks**: #32 (CI/CD) - -#### Description -Write unit tests for authentication middleware, rate limiting, and API endpoints. 
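The example below exercises key validation and the auth helper but not rate limiting, even though the description covers it. The rate-limit cases could look like this sketch; it assumes the `enforceRateLimit` helper described in Epic 2 and test keys seeded at the free-tier limit (key ids here are illustrative):

```typescript
import { describe, test, expect } from 'bun:test'
import { enforceRateLimit } from '@auth/rate-limit'

describe('Rate Limiting', () => {
  test('allows requests under the limit', async () => {
    const result = await enforceRateLimit('fresh-key-id', 100)

    expect(result.allowed).toBe(true)
    expect(result.remaining).toBe(99) // first request in the window
  })

  test('rejects requests once the limit is exhausted', async () => {
    for (let i = 0; i < 100; i++) {
      await enforceRateLimit('busy-key-id', 100)
    }

    const result = await enforceRateLimit('busy-key-id', 100)

    expect(result.allowed).toBe(false)
    expect(result.remaining).toBe(0)
    expect(result.retryAfter).toBeGreaterThan(0)
  })
})
```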
- -#### Acceptance Criteria -- [ ] Test API key validation (valid, invalid, expired) -- [ ] Test authentication middleware (401, 403 responses) -- [ ] Test rate limiting (within limit, exceeded) -- [ ] Test REST endpoints (success, error cases) -- [ ] Mock Supabase calls (don't hit real database) -- [ ] 70%+ code coverage for `src/auth/` and `src/api/` -- [ ] All tests pass in CI - -#### Technical Notes -- Use Bun's test runner with mocking -- Mock Supabase client responses -- Test error handling and edge cases -- Verify response headers (rate limit headers, etc.) - -#### Files to Create -- `tests/auth/keys.test.ts` -- `tests/auth/middleware.test.ts` -- `tests/auth/rate-limit.test.ts` -- `tests/api/search.test.ts` -- `tests/api/repositories.test.ts` -- `tests/api/jobs.test.ts` - -#### Example Test -```typescript -import { describe, test, expect, mock } from 'bun:test' -import { validateApiKey } from '@auth/keys' -import { authenticateRequest } from '@auth/middleware' - -describe('API Key Validation', () => { - test('validates correct API key', async () => { - const result = await validateApiKey('kota_dev_valid_key_123') - - expect(result).toBeTruthy() - expect(result?.userId).toBeDefined() - expect(result?.tier).toBe('free') - }) - - test('rejects invalid API key', async () => { - const result = await validateApiKey('invalid_key') - - expect(result).toBeNull() - }) - - test('rejects disabled API key', async () => { - const result = await validateApiKey('kota_dev_disabled_key') - - expect(result).toBeNull() - }) -}) - -describe('Authentication Helper', () => { - test('returns context for valid key', async () => { - const request = new Request('https://example.com/api', { - headers: { Authorization: 'Bearer kota_dev_valid_key' }, - }) - - const result = await authenticateRequest(request) - - expect(result.response).toBeUndefined() - expect(result.context?.userId).toBeDefined() - }) - - test('returns 401 response for missing key', async () => { - const request = new Request('https://example.com/api') - const result = await authenticateRequest(request) - - expect(result.response?.status).toBe(401) - expect(result.context).toBeUndefined() - }) -}) -``` - ---- - -### Issue #36: Integration tests for indexing pipeline - -**Priority**: P1 (High) -**Depends on**: #7 (test infrastructure), #14 (indexing worker), #18 (webhooks) -**Blocks**: Production deployment - -#### Description -Write integration tests for the full indexing pipeline: webhook → queue → worker → database. 
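The integration and E2E examples in this epic call a `waitForJobCompletion` helper that is never defined. A minimal polling sketch could look like this; it polls by job id against the Epic 1 schema, so the webhook test (which passes a commit SHA) would first resolve the job id from `index_jobs.commit_sha`:

```typescript
import { supabase } from '@db/client'

// Hypothetical helper: polls index_jobs until the job reaches a terminal
// status or the timeout elapses.
export async function waitForJobCompletion(jobId: string, timeoutMs: number): Promise<void> {
  const deadline = Date.now() + timeoutMs

  while (Date.now() < deadline) {
    const { data, error } = await supabase
      .from('index_jobs')
      .select('status, error_message')
      .eq('id', jobId)
      .single()

    if (error) throw error
    if (data.status === 'completed') return
    if (data.status === 'failed') {
      throw new Error(data.error_message ?? 'Indexing job failed')
    }

    await new Promise((resolve) => setTimeout(resolve, 500)) // poll twice a second
  }

  throw new Error(`Job ${jobId} did not complete within ${timeoutMs}ms`)
}
```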
- -#### Acceptance Criteria -- [ ] Test full workflow with fixture repositories -- [ ] Verify GitHub webhook triggers job -- [ ] Verify job is queued and processed -- [ ] Verify data is stored correctly in database -- [ ] Test with real Supabase test database -- [ ] Test error handling and retries -- [ ] All tests pass in CI - -#### Technical Notes -- Use Supabase test project (not production) -- Seed database before each test -- Clean up after each test -- Mock GitHub API calls -- Test with both simple and complex repos - -#### Files to Create -- `tests/integration/indexing-pipeline.test.ts` -- `tests/integration/webhook-processing.test.ts` - -#### Example Test -```typescript -import { describe, test, expect, beforeEach, afterEach } from 'bun:test' -import { handleWebhook } from '@github/webhook-handler' -import { startIndexWorker } from '@queue/workers/index-repo' - -describe('Indexing Pipeline Integration', () => { - beforeEach(async () => { - await seedTestDatabase() - await startIndexWorker() - }) - - afterEach(async () => { - await cleanupTestDatabase() - }) - - test('indexes repository from webhook push', async () => { - // Simulate GitHub push webhook - const payload = { - repository: { full_name: 'test-org/test-repo' }, - ref: 'refs/heads/main', - after: 'abc123', - } - - const req = mockWebhookRequest(payload) - const res = mockResponse() - - await handleWebhook(req, res) - - // Wait for job to process - await waitForJobCompletion('abc123', 30000) - - // Verify data was indexed - const { data: files } = await supabase - .from('indexed_files') - .select('*') - .eq('repositories.full_name', 'test-org/test-repo') - - expect(files.length).toBeGreaterThan(0) - - // Verify symbols were extracted - const { data: symbols } = await supabase - .from('symbols') - .select('*') - .eq('file_id', files[0].id) - - expect(symbols.length).toBeGreaterThan(0) - }) - - test('handles parsing errors gracefully', async () => { - // Repository with invalid syntax files - // Should index successfully but skip bad files - }) -}) -``` - ---- - -### Issue #37: Integration tests for MCP protocol - -**Priority**: P1 (High) -**Depends on**: #7 (test infrastructure), #23-27 (MCP server) -**Blocks**: Production deployment - -#### Description -Write integration tests for MCP SSE connection, protocol handshake, and tool execution. 
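The `tests/utils/mcp-client.ts` helper listed below is never shown. One plausible shape is sketched here; the `/mcp/sse` path and query-string auth are assumptions (an `EventSource` cannot set headers), while the endpoint/message event flow follows MCP's SSE transport:

```typescript
// Hypothetical tests/utils/mcp-client.ts: tiny SSE-based JSON-RPC client.
export async function createMcpClient(apiKey: string, baseUrl = 'http://localhost:3000') {
  const pending = new Map<number, (message: any) => void>()
  const source = new EventSource(`${baseUrl}/mcp/sse?apiKey=${apiKey}`)

  // The server's first SSE event names the URL to POST JSON-RPC messages to.
  const postUrl = await new Promise<string>((resolve, reject) => {
    source.addEventListener('endpoint', (event) =>
      resolve(new URL((event as MessageEvent).data, baseUrl).toString())
    )
    source.onerror = () => reject(new Error('SSE connection failed'))
  })

  // JSON-RPC responses arrive as message events; match them to requests by id.
  source.addEventListener('message', (event) => {
    const message = JSON.parse((event as MessageEvent).data)
    pending.get(message.id)?.(message)
    pending.delete(message.id)
  })

  return {
    isConnected: () => source.readyState === EventSource.OPEN,
    close: () => source.close(),
    async send(request: { id: number } & Record<string, unknown>) {
      const reply = new Promise<any>((resolve) => pending.set(request.id, resolve))
      await fetch(postUrl, {
        method: 'POST',
        headers: { 'content-type': 'application/json' },
        body: JSON.stringify(request),
      })
      return reply
    },
  }
}
```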
- -#### Acceptance Criteria -- [ ] Test SSE connection lifecycle -- [ ] Test MCP initialize handshake -- [ ] Test tool discovery (tools/list) -- [ ] Test each tool execution (search_code, find_references, get_dependencies) -- [ ] Test error handling -- [ ] Test authentication via API key -- [ ] All tests pass in CI - -#### Technical Notes -- Use EventSource or SSE client library -- Test with real Supabase test database -- Seed database with known indexed data -- Verify response format matches MCP spec - -#### Files to Create -- `tests/integration/mcp-protocol.test.ts` -- `tests/integration/mcp-tools.test.ts` -- `tests/utils/mcp-client.ts` - Test MCP client - -#### Example Test -```typescript -import { describe, test, expect, beforeAll } from 'bun:test' -import { createMcpClient } from '@tests/utils/mcp-client' - -describe('MCP Protocol Integration', () => { - let client: McpClient - - beforeAll(async () => { - await seedTestDatabase() - client = await createMcpClient('kota_test_api_key') - }) - - test('establishes SSE connection', async () => { - expect(client.isConnected()).toBe(true) - }) - - test('completes initialize handshake', async () => { - const response = await client.send({ - jsonrpc: '2.0', - id: 1, - method: 'initialize', - params: { - protocolVersion: '2025-06-18', - clientInfo: { name: 'test-client', version: '1.0.0' }, - }, - }) - - expect(response.result.protocolVersion).toBe('2025-06-18') - expect(response.result.serverInfo.name).toBe('KotaDB') - }) - - test('lists available tools', async () => { - const response = await client.send({ - jsonrpc: '2.0', - id: 2, - method: 'tools/list', - }) - - expect(response.result.tools).toHaveLength(3) - expect(response.result.tools.map((t) => t.name)).toContain('search_code') - expect(response.result.tools.map((t) => t.name)).toContain('find_references') - expect(response.result.tools.map((t) => t.name)).toContain('get_dependencies') - }) - - test('executes search_code tool', async () => { - const response = await client.send({ - jsonrpc: '2.0', - id: 3, - method: 'tools/call', - params: { - name: 'search_code', - arguments: { query: 'function hello' }, - }, - }) - - expect(response.result.content).toBeDefined() - expect(response.result.content[0].type).toBe('text') - expect(response.result.content[0].text).toContain('hello') - }) -}) -``` - ---- - -### Issue #38: E2E tests for critical workflows - -**Priority**: P1 (High) -**Depends on**: All previous epics integrated -**Blocks**: Production launch - -#### Description -Write end-to-end tests for complete user workflows from repository addition to MCP query. - -#### Acceptance Criteria -- [ ] Test: User adds repo → indexing completes → MCP query returns results -- [ ] Test: Multiple users with data isolation (RLS) -- [ ] Test: Rate limiting enforcement -- [ ] Test: Error scenarios (repo not found, invalid API key, etc.) 
-- [ ] All tests pass in CI - -#### Technical Notes -- Use real Supabase test environment -- Test full stack: REST API, MCP API, job queue, database -- Verify RLS prevents cross-user data access -- Test performance (query latency benchmarks) - -#### Files to Create -- `tests/e2e/user-workflows.test.ts` -- `tests/e2e/multi-tenancy.test.ts` -- `tests/e2e/rate-limiting.test.ts` - -#### Example Test -```typescript -import { describe, test, expect } from 'bun:test' - -describe('E2E User Workflows', () => { - test('complete user journey: add repo → index → query', async () => { - // Step 1: Create user and API key - const user = await createTestUser() - const apiKey = await generateTestApiKey(user.id, 'free') - - // Step 2: Add repository - const repoResponse = await fetch('http://localhost:3000/api/repositories', { - method: 'POST', - headers: { - 'Authorization': `Bearer ${apiKey}`, - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - fullName: 'test-org/test-repo', - installationId: 12345, - }), - }) - - expect(repoResponse.status).toBe(201) - const { repository, jobId } = await repoResponse.json() - - // Step 3: Wait for indexing to complete - await waitForJobCompletion(jobId, 60000) - - // Step 4: Query via MCP - const mcpClient = await createMcpClient(apiKey) - const searchResult = await mcpClient.callTool('search_code', { - query: 'function', - repository: 'test-org/test-repo', - }) - - expect(searchResult.content[0].text).toContain('function') - }) - - test('multi-user data isolation', async () => { - const user1 = await createTestUser() - const user2 = await createTestUser() - - const key1 = await generateTestApiKey(user1.id, 'free') - const key2 = await generateTestApiKey(user2.id, 'free') - - // User 1 adds repo - await addRepository(key1, 'user1/repo') - - // User 2 should not see User 1's repo - const user2Repos = await listRepositories(key2) - expect(user2Repos).not.toContainEqual(expect.objectContaining({ full_name: 'user1/repo' })) - }) -}) -``` - ---- - -## Success Criteria - -- [ ] 70%+ code coverage across all modules -- [ ] All unit tests pass in CI -- [ ] All integration tests pass in CI -- [ ] All E2E tests pass in CI -- [ ] Tests run in < 5 minutes total -- [ ] ADW-generated PRs pass tests 80%+ of the time - -## Dependencies for Other Epics - -This epic depends on: -- All other epics (tests validate implementations) - -This epic enables: -- Epic 9 (CI/CD requires passing tests) -- Autonomous development (ADW confidence) -- Production deployment (quality gate) diff --git a/docs/vision/epic-2-authentication.md b/docs/vision/epic-2-authentication.md deleted file mode 100644 index d16ce029..00000000 --- a/docs/vision/epic-2-authentication.md +++ /dev/null @@ -1,347 +0,0 @@ -# Epic 2: Authentication Infrastructure - -> **Reference Document**: This epic was from original planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis. - -**Status**: ✅ 90% Complete (Production-Ready) -**Priority**: Critical (Blocks all API work) -**Estimated Duration**: 1 week -**Actual Duration**: ~1 week (completed October 2025) - -**Completion Summary**: API key system live, tier-based rate limiting enforced (free: 100/hr, solo: 1000/hr, team: 10000/hr). Remaining: organization management endpoints for team tier multi-tenancy. - -## Overview - -Implement API key generation, validation, and tier-based rate limiting. This authentication system protects both REST and MCP endpoints. 
- -## Issues - -### Issue #4: API key generation and storage system - -**Priority**: P0 (Critical) -**Depends on**: #1 (needs `api_keys` table), #2 (Supabase client) -**Blocks**: #5, #6 - -#### Description -Build API key generation, hashing, and validation system with support for three tiers (free, solo, team). - -#### Acceptance Criteria -- [ ] Generate keys with format `kota_<env>_<key_id>.<secret>` - -- [ ] Persist `key_id` separately from the hashed secret (`secret_hash`) - -- [ ] Hash secret portion with bcrypt before storage - -- [ ] Store tier information (free/solo/team) - -- [ ] Associate keys with `user_id` and optional `org_id` - -- [ ] Set rate limits based on tier - -- [ ] Validate keys via single-row lookup on `key_id` - -- [ ] Track `last_used_at` timestamp - -- [ ] Support key revocation (set `enabled = false`) - -#### Technical Notes - -- Use `bcryptjs` for hashing (rounds: 10) - -- Generate `keyId` as a collision-resistant slug (e.g., 12 base32 chars) - -- Secret segment: `crypto.randomBytes(18).toString('base64url')` - -- Store `key_id` and `secret_hash`; never persist full keys after creation - -- Rate limits: free=100/hr, solo=1000/hr, team=10000/hr - -#### Files to Create - -- `src/auth/keys.ts` - Key generation and validation - -- `src/auth/types.ts` - Tier types and interfaces - -#### Example Implementation -```typescript -import bcrypt from 'bcryptjs' -import crypto from 'crypto' -import { supabase } from '@db/client' - -export type Tier = 'free' | 'solo' | 'team' - -const RATE_LIMITS: Record<Tier, number> = { - free: 100, - solo: 1000, - team: 10000, -} - -export async function generateApiKey( - userId: string, - tier: Tier, - orgId?: string -): Promise<{ key: string; id: string }> { - const env = process.env.NODE_ENV === 'production' ? 'prod' : 'dev' - const keyId = crypto.randomBytes(9).toString('base64url') // ~12 chars - const secret = crypto.randomBytes(18).toString('base64url') - const key = `kota_${env}_${keyId}.${secret}` - const secretHash = await bcrypt.hash(secret, 10) - - const { data, error } = await supabase - .from('api_keys') - .insert({ - user_id: userId, - key_id: keyId, - secret_hash: secretHash, - tier, - org_id: orgId, - rate_limit_per_hour: RATE_LIMITS[tier], - }) - .select() - .single() - - if (error || !data) throw error - - return { key, id: data.id } // Return plain key only once -} - -export async function validateApiKey(key: string): Promise<{ - userId: string - tier: Tier - orgId?: string - keyId: string - rateLimitPerHour: number -} | null> { - if (!key?.startsWith('kota_') || !key.includes('.')) { - return null - } - - const [prefix, secret] = key.split('.') - const keyId = prefix.split('_').at(-1) - - if (!keyId || !secret) { - return null - } - - const { data, error } = await supabase - .from('api_keys') - .select('*') - .eq('key_id', keyId) - .eq('enabled', true) - .single() - - if (error || !data) { - return null - } - - const match = await bcrypt.compare(secret, data.secret_hash) - if (!match) { - return null - } - - await supabase - .from('api_keys') - .update({ last_used_at: new Date().toISOString() }) - .eq('id', data.id) - - return { - userId: data.user_id, - tier: data.tier, - orgId: data.org_id ?? undefined, - keyId: data.key_id, - rateLimitPerHour: data.rate_limit_per_hour, - } -} -``` - ---- - -### Issue #5: Authentication middleware - -**Priority**: P0 (Critical) -**Depends on**: #4 -**Blocks**: All REST and MCP endpoints - -#### Description -Create authentication middleware that validates API keys, extracts user context, and enforces RLS via Supabase client configuration. 
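The technical notes below suggest briefly caching validation results to reduce database load. A minimal in-process sketch could wrap `validateApiKey` like this; the wrapper name and cache shape are illustrative:

```typescript
import { validateApiKey } from '@auth/keys'

type Validation = Awaited<ReturnType<typeof validateApiKey>>

const cache = new Map<string, { value: Validation; expiresAt: number }>()
const TTL_MS = 5_000 // the 5-second window from the technical notes

// Hypothetical wrapper: skips the bcrypt compare and database lookup for
// keys validated within the last few seconds. In-process only; a
// multi-instance deployment would need a shared store instead.
export async function validateApiKeyCached(key: string): Promise<Validation> {
  const hit = cache.get(key)
  if (hit && hit.expiresAt > Date.now()) return hit.value

  const value = await validateApiKey(key)
  if (value) cache.set(key, { value, expiresAt: Date.now() + TTL_MS })
  return value
}
```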
- -#### Acceptance Criteria - -- [ ] Extract API key from `Authorization: Bearer <key>` header - -- [ ] Validate key using `validateApiKey()` - -- [ ] Inject user context into request object - -- [ ] Configure Supabase client with RLS for user - -- [ ] Return 401 for missing/invalid keys - -- [ ] Return 403 for disabled keys - -- [ ] Log authentication attempts (success/failure) - -#### Technical Notes - -- Middleware runs before all protected routes - -- Produce an `AuthContext` object (`userId`, `tier`, `orgId`, `keyId`, `rateLimitPerHour`) - -- Use Supabase `auth.setSession()` or row level security helpers to impersonate the user - -- Cache validation results briefly (5 sec) to reduce database load - -#### Files to Create - -- `src/auth/middleware.ts` - Authentication middleware - -- `src/auth/context.ts` - User context types - -#### Example Implementation -```typescript -export interface AuthContext { - userId: string - tier: Tier - orgId?: string - keyId: string - rateLimitPerHour: number -} - -export async function authenticateRequest( - request: Request -): Promise<{ context?: AuthContext; response?: Response }> { - const authHeader = request.headers.get('authorization') - if (!authHeader?.startsWith('Bearer ')) { - return { - response: new Response(JSON.stringify({ error: 'Missing API key' }), { - status: 401, - headers: { 'content-type': 'application/json' }, - }), - } - } - - const key = authHeader.slice(7) - const result = await validateApiKey(key) - - if (!result) { - return { - response: new Response(JSON.stringify({ error: 'Invalid API key' }), { - status: 401, - headers: { 'content-type': 'application/json' }, - }), - } - } - - const context: AuthContext = { - userId: result.userId, - tier: result.tier, - orgId: result.orgId, - keyId: result.keyId, - rateLimitPerHour: result.rateLimitPerHour, - } - - // Configure Supabase RLS for this user (auth token or impersonation) - - return { context } -} - -// Usage inside fetch handler -export async function handleProtectedRoute(request: Request): Promise<Response> { - const auth = await authenticateRequest(request) - if (auth.response) { - return auth.response - } - - const { context } = auth - // Route logic here, context is guaranteed - return new Response(JSON.stringify({ userId: context!.userId })) -} -``` - ---- - -### Issue #6: Rate limiting middleware - -**Priority**: P1 (High) -**Depends on**: #5 -**Blocks**: Production deployment - -#### Description -Implement tier-based rate limiting to prevent abuse and enforce plan limits. 
- -#### Acceptance Criteria - -- [ ] Track requests per user per hour using the `rate_limit_counters` table - -- [ ] Enforce tier-specific limits (free=100, solo=1000, team=10000) - -- [ ] Return 429 with `Retry-After` header when limit exceeded - -- [ ] Reset counters every hour via atomic updates - -- [ ] Handle concurrent requests safely (row-level locking or `ON CONFLICT` semantics) - -- [ ] Exclude health check endpoint from rate limiting - -#### Technical Notes - -- Store counters in Postgres (`rate_limit_counters`) addressed by `key_id` - -- Use `ON CONFLICT` upsert (or Supabase RPC) to increment counters atomically - -- Reset window by comparing `window_start` with current hour; when expired, write a fresh row with `request_count = 1` - -- Include current usage in response headers: `X-RateLimit-Limit`, `X-RateLimit-Remaining` - -#### Files to Create - -- `src/auth/rate-limit.ts` - Rate limiting middleware - -#### Example Implementation -```typescript -import { supabase } from '@db/client' - -const ONE_HOUR_MS = 60 * 60 * 1000 - -export interface RateLimitResult { - allowed: boolean - remaining: number - retryAfter?: number -} - -export async function enforceRateLimit( - keyId: string, - rateLimitPerHour: number -): Promise<RateLimitResult> { - const now = Date.now() - const windowStart = new Date(Math.floor(now / ONE_HOUR_MS) * ONE_HOUR_MS).toISOString() - - const { data, error } = await supabase.rpc('increment_rate_limit', { - p_key_id: keyId, - p_window_start: windowStart, - }) - - if (error) { - throw error - } - - const { request_count: requestCount, reset_at: resetAt } = (data as Array<{ - request_count: number - reset_at: string - }>)[0] - - const remaining = Math.max(0, rateLimitPerHour - requestCount) - const allowed = requestCount <= rateLimitPerHour - const retryAfter = allowed - ? undefined - : Math.ceil((new Date(resetAt).getTime() - now) / 1000) - - return { allowed, remaining, retryAfter } -} - -export async function withRateLimiting( - context: { keyId: string; rateLimitPerHour: number }, - handler: () => Promise<Response> -): Promise<Response> { - const result = await enforceRateLimit(context.keyId, context.rateLimitPerHour) - - const headers = new Headers({ - 'X-RateLimit-Limit': context.rateLimitPerHour.toString(), - 'X-RateLimit-Remaining': result.remaining.toString(), - }) - - if (!result.allowed) { - if (result.retryAfter !== undefined) { - headers.set('Retry-After', result.retryAfter.toString()) - } - - return new Response(JSON.stringify({ error: 'Rate limit exceeded' }), { - status: 429, - headers, - }) - } - - const response = await handler() - const finalResponse = new Response(response.body, response) - headers.forEach((value, key) => finalResponse.headers.set(key, value)) - return finalResponse -} -``` - ---- - -## Success Criteria - -- [ ] API keys can be generated and stored securely - -- [ ] Keys validate correctly with bcrypt comparison - -- [ ] Authentication middleware protects all endpoints - -- [ ] Rate limiting enforces tier-specific limits - -- [ ] 401/403/429 responses are clear and actionable - -- [ ] User context is available in all authenticated requests - -## Dependencies for Other Epics - -This epic must be completed before: - -- Epic 6 (REST API needs auth) -- Epic 7 (MCP API needs auth) -- Any endpoint that requires user context diff --git a/docs/vision/epic-3-code-parsing.md b/docs/vision/epic-3-code-parsing.md deleted file mode 100644 index 525b2291..00000000 --- a/docs/vision/epic-3-code-parsing.md +++ /dev/null @@ -1,390 +0,0 @@ -# Epic 3: Enhanced Code Parsing - -> **Reference Document**: This epic was from original 
planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis. - -**Status**: 🟢 70% Complete (**NO LONGER BLOCKING MVP**) -**Priority**: High (Core functionality) -**Estimated Duration for Remaining Work**: 1 week -**Actual Progress**: AST parsing complete, symbol extraction complete, reference extraction complete (#75), dependency graph complete (#76). Symbol resolution for `find_references` tool remains. - -## Overview - -Migrate from regex-based parsing to proper AST parsing using `@typescript-eslint/parser`. Extract symbols, references, and dependencies with precise position information. - -## Current Status - -**Completion**: 70% (updated 2025-10-20) -**Blockers**: None (no longer blocking MVP) - -### Completed (as of 2025-10-20) -- ✅ AST parsing with `@typescript-eslint/parser` (#117) - Merged in PR #117 -- ✅ Symbol extraction (#74) - Merged in PR #126 -- ✅ **Reference extraction** (#75) - Merged in PR #225 - - Extracts imports, function calls, property accesses, type references - - Stores caller location (file, line, column) - - Handles aliased imports -- ✅ **Dependency graph extraction** (#76) - Merged in PR #226 - - File→file dependencies via imports - - Circular dependency detection during traversal - - Stored in `dependency_graph` table -- ✅ **`search_dependencies` MCP tool** (#116) - Merged in PR #229 - - Three search directions: dependents, dependencies, both - - Recursive traversal with configurable depth (1-5) - - Optional test file filtering - -### In Progress -- None - -### Remaining Work -- Symbol resolution for `find_references` MCP tool (~1 week) -- Type relationship extraction (interfaces, generics) - nice-to-have -- Docstring/comment extraction (JSDoc, TSDoc) - nice-to-have - -## Issues - -### Issue #7: Set up test infrastructure - -**Priority**: P0 (Critical) -**Depends on**: #1 (needs schema), #2 (needs Supabase client) -**Blocks**: All parsing work - -#### Description -Create test infrastructure with fixture repositories, database seeding, and test utilities. This enables TDD for parsing logic. - -#### Acceptance Criteria -- [ ] Test database setup/teardown scripts -- [ ] Fixture repositories with known structure - - Simple repo: 5 files, linear dependencies - - Complex repo: 20 files, circular dependencies, multiple languages -- [ ] Mock GitHub webhook payloads -- [ ] Test utilities for assertion helpers -- [ ] Documentation for adding new fixtures - -#### Technical Notes -- Use in-memory SQLite for fast unit tests -- Use Supabase test project for integration tests -- Fixtures should cover: imports, exports, function calls, type references -- Store fixtures in `tests/fixtures/` - -#### Files to Create -- `tests/setup.ts` - Test database setup -- `tests/fixtures/simple-repo/` - Simple test repository -- `tests/fixtures/complex-repo/` - Complex test repository -- `tests/utils.ts` - Test helper functions -- `tests/README.md` - Testing documentation - -#### Example Fixture Structure -``` -tests/fixtures/simple-repo/ - src/ - index.ts # Exports main(), imports utils - utils.ts # Exports helper functions - types.ts # Type definitions - package.json - tsconfig.json - -tests/fixtures/complex-repo/ - src/ - api/ - routes.ts # Imports handlers - handlers.ts # Circular dep with routes - db/ - schema.ts # Type definitions - queries.ts # Uses schema types - ... 
-``` - ---- - -### Issue #8: Migrate to @typescript-eslint/parser - -**Priority**: P0 (Critical) -**Depends on**: #7 -**Blocks**: #9, #10, #11 - -#### Description -Replace regex-based parsing with proper TypeScript/JavaScript AST parsing. Extract basic AST structure and handle all supported file types. - -#### Acceptance Criteria -- [ ] Parse `.ts`, `.tsx`, `.js`, `.jsx` files -- [ ] Extract full AST (Abstract Syntax Tree) -- [ ] Handle syntax errors gracefully (log and skip) -- [ ] Preserve source locations (line, column) for all nodes -- [ ] Support both TypeScript and JavaScript syntax -- [ ] Unit tests with fixture files - -#### Technical Notes -- Use `@typescript-eslint/parser` (supports both TS and JS) -- Parse options: `{ ecmaVersion: 'latest', sourceType: 'module' }` -- Don't fail on parse errors—log and continue -- Store AST in memory (don't persist entire AST to DB) - -#### Files to Create -- `src/indexer/ast-parser.ts` - AST parsing wrapper -- `src/indexer/ast-types.ts` - Type definitions for AST nodes -- `tests/indexer/ast-parser.test.ts` - Parser tests - -#### Example Implementation -```typescript -import { parse } from '@typescript-eslint/parser' -import type { TSESTree } from '@typescript-eslint/types' - -export function parseFile(filePath: string, content: string): TSESTree.Program | null { - try { - const ast = parse(content, { - ecmaVersion: 'latest', - sourceType: 'module', - loc: true, - range: true, - comment: true, - tokens: true, - filePath, - }) - return ast - } catch (error) { - console.error(`Failed to parse ${filePath}:`, error) - return null - } -} -``` - ---- - -### Issue #9: Implement symbol extraction - -**Priority**: P0 (Critical) -**Depends on**: #8 -**Blocks**: #10, #11 - -#### Description -Extract function, class, interface, type, and export declarations from AST. Store with precise location information. 
- -#### Acceptance Criteria -- [ ] Extract all function declarations and expressions -- [ ] Extract class declarations -- [ ] Extract interface and type alias declarations -- [ ] Extract variable declarations (const, let, var) -- [ ] Extract export statements (named and default) -- [ ] Store line/column positions (start and end) -- [ ] Extract function signatures (parameters, return type) -- [ ] Extract JSDoc/TSDoc comments as docstrings -- [ ] Mark symbols as exported or internal -- [ ] Unit tests for each symbol type - -#### Technical Notes -- Traverse AST using visitor pattern -- Handle edge cases: anonymous functions, arrow functions, default exports -- Store in `symbols` table with foreign key to `indexed_files` - -#### Files to Create -- `src/indexer/symbol-extractor.ts` - Symbol extraction logic -- `tests/indexer/symbol-extractor.test.ts` - Symbol extraction tests - -#### Example Implementation -```typescript -export function extractSymbols(ast: TSESTree.Program, fileId: string): Symbol[] { - const symbols: Symbol[] = [] - - visit(ast, { - FunctionDeclaration(node) { - symbols.push({ - fileId, - name: node.id?.name || '', - kind: 'function', - lineStart: node.loc.start.line, - lineEnd: node.loc.end.line, - columnStart: node.loc.start.column, - columnEnd: node.loc.end.column, - signature: buildSignature(node), - docstring: extractDocstring(node), - isExported: isExported(node), - }) - }, - - ClassDeclaration(node) { - // Similar extraction for classes - }, - - TSInterfaceDeclaration(node) { - // Extract interfaces - }, - - TSTypeAliasDeclaration(node) { - // Extract type aliases - }, - }) - - return symbols -} -``` - ---- - -### Issue #10: Implement reference extraction - -**Priority**: P1 (High) -**Depends on**: #9 -**Blocks**: #11 - -#### Description -Extract all references to symbols: imports, function calls, property accesses, and type references. 
- -#### Acceptance Criteria -- [ ] Extract import statements (all forms: named, default, namespace) -- [ ] Extract function calls (including method calls) -- [ ] Extract property accesses (for finding usage of exports) -- [ ] Extract type references (TypeScript `Type` syntax) -- [ ] Store caller location (file, line, column) -- [ ] Link references to symbols (by name, resolve later) -- [ ] Handle aliased imports (`import { foo as bar }`) -- [ ] Unit tests for each reference type - -#### Technical Notes -- Store in `references` table -- Link to `symbol_id` after symbol resolution (post-processing step) -- Handle both relative and absolute imports - -#### Files to Create -- `src/indexer/reference-extractor.ts` - Reference extraction logic -- `tests/indexer/reference-extractor.test.ts` - Reference extraction tests - -#### Example Implementation -```typescript -export function extractReferences(ast: TSESTree.Program, fileId: string): Reference[] { - const references: Reference[] = [] - - visit(ast, { - ImportDeclaration(node) { - node.specifiers.forEach((spec) => { - references.push({ - fileId, - symbolName: spec.local.name, - line: node.loc.start.line, - column: node.loc.start.column, - referenceType: 'import', - importSource: node.source.value, - }) - }) - }, - - CallExpression(node) { - if (node.callee.type === 'Identifier') { - references.push({ - fileId, - symbolName: node.callee.name, - line: node.loc.start.line, - column: node.loc.start.column, - referenceType: 'call', - }) - } - }, - - MemberExpression(node) { - // Extract property accesses - }, - - TSTypeReference(node) { - // Extract type references - }, - }) - - return references -} -``` - ---- - -### Issue #11: Build dependency graph extraction - -**Priority**: P1 (High) -**Depends on**: #10 -**Blocks**: MCP `get_dependencies` tool (#27) - -#### Description -Build file-to-file and symbol-to-symbol dependency graphs. Detect circular dependencies. 
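The example implementation below leaves `detectCircularDependencies` as a stub. One way to fill it, assuming the file-level `file_import` edges extracted above, is a colored depth-first search that reports each back edge as a cycle:

```typescript
// DFS cycle detection over file_import edges. Returns each cycle as the
// list of file ids along the back edge. Assumes Dependency carries
// fromFileId/toFileId as in the example below.
export function detectCircularDependencies(dependencies: Dependency[]): string[][] {
  const graph = new Map<string, string[]>()
  for (const dep of dependencies) {
    if (dep.dependencyType !== 'file_import' || !dep.fromFileId || !dep.toFileId) continue
    const edges = graph.get(dep.fromFileId) ?? []
    edges.push(dep.toFileId)
    graph.set(dep.fromFileId, edges)
  }

  const cycles: string[][] = []
  const visiting = new Set<string>() // nodes on the current DFS path
  const done = new Set<string>() // fully explored nodes
  const path: string[] = []

  const dfs = (node: string): void => {
    visiting.add(node)
    path.push(node)
    for (const next of graph.get(node) ?? []) {
      if (visiting.has(next)) {
        cycles.push([...path.slice(path.indexOf(next)), next]) // back edge found
      } else if (!done.has(next)) {
        dfs(next)
      }
    }
    visiting.delete(node)
    done.add(node)
    path.pop()
  }

  for (const node of graph.keys()) {
    if (!done.has(node)) dfs(node)
  }
  return cycles
}
```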
- -#### Acceptance Criteria -- [ ] Extract file → file dependencies (via imports) -- [ ] Extract symbol → symbol dependencies (via usage) -- [ ] Resolve relative imports to absolute file paths -- [ ] Handle circular dependencies (detect and warn) -- [ ] Store in `dependencies` table -- [ ] Build recursive dependency tree -- [ ] Unit tests with circular and non-circular examples - -#### Technical Notes -- Use import paths to resolve file dependencies -- Use symbol references to resolve symbol dependencies -- Store both directions: forward deps (A imports B) and reverse deps (B imported by A) - -#### Files to Create -- `src/indexer/dependency-extractor.ts` - Dependency extraction -- `src/indexer/circular-detector.ts` - Circular dependency detection -- `tests/indexer/dependency-extractor.test.ts` - Dependency tests - -#### Example Implementation -```typescript -export function extractDependencies( - files: IndexedFile[], - symbols: Symbol[], - references: Reference[] -): Dependency[] { - const dependencies: Dependency[] = [] - - // Build file → file dependencies - references.forEach((ref) => { - if (ref.referenceType === 'import' && ref.importSource) { - const resolvedPath = resolveImport(ref.importSource, ref.fileId) - const targetFile = files.find((f) => f.path === resolvedPath) - - if (targetFile) { - dependencies.push({ - fromFileId: ref.fileId, - toFileId: targetFile.id, - dependencyType: 'file_import', - }) - } - } - }) - - // Build symbol → symbol dependencies - references.forEach((ref) => { - if (ref.referenceType === 'call') { - const targetSymbol = symbols.find((s) => s.name === ref.symbolName) - const callerSymbol = findCallerSymbol(symbols, ref) - - if (targetSymbol && callerSymbol) { - dependencies.push({ - fromSymbolId: callerSymbol.id, - toSymbolId: targetSymbol.id, - dependencyType: 'symbol_usage', - }) - } - } - }) - - return dependencies -} - -export function detectCircularDependencies(dependencies: Dependency[]): string[][] { - // Implement cycle detection (DFS or Tarjan's algorithm) -} -``` - ---- - -## Success Criteria - -- [ ] AST parser handles all TS/JS syntax correctly -- [ ] Symbols extracted with accurate position information -- [ ] References capture all imports, calls, and type usage -- [ ] Dependency graph is complete and accurate -- [ ] Circular dependencies are detected -- [ ] 70%+ test coverage for all parsing modules - -## Dependencies for Other Epics - -This epic enables: -- Epic 4 (indexing worker needs extraction pipeline) -- Epic 7 (MCP tools query extracted data) -- Epic 6 (REST API exposes extracted data) diff --git a/docs/vision/epic-4-job-queue.md b/docs/vision/epic-4-job-queue.md deleted file mode 100644 index af03acbd..00000000 --- a/docs/vision/epic-4-job-queue.md +++ /dev/null @@ -1,265 +0,0 @@ -# Epic 4: Job Queue & Background Processing - -> **Reference Document**: This epic was from original planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis. - -**Status**: 🔴 0% Complete (**MVP BLOCKER**) -**Priority**: Critical (Enables async indexing) -**Estimated Duration**: 1-2 weeks -**Actual Progress**: Not started. All indexing runs synchronously. **Second-highest priority gap for MVP.** - -## Overview - -Implement pg-boss job queue for reliable async indexing. Workers consume jobs, process repositories, and update status for frontend visibility. 
- -## Issues - -### Issue #12: Set up pg-boss job queue - -**Priority**: P0 (Critical) -**Depends on**: #2 (Supabase Postgres connection) -**Blocks**: #13, #14 - -#### Description -Configure pg-boss to use Supabase Postgres as job store. Initialize queue, configure workers, and handle retries. - -#### Acceptance Criteria - -- [ ] pg-boss initialized with Supabase connection - -- [ ] Job queue tables created automatically by pg-boss - -- [ ] Queue health check function - -- [ ] Retry configuration (3 attempts, exponential backoff) - -- [ ] Dead letter queue for permanently failed jobs - -- [ ] Graceful shutdown on process termination - -#### Technical Notes - -- pg-boss uses Postgres for job storage (no Redis needed) - -- Creates its own schema (`pgboss`) with job tracking tables - -- Handles locking, retries, and expiration automatically - -- Requires a native Postgres connection string (e.g., `postgresql://user:pass@host:port/db`) - -- Store the connection string in `SUPABASE_DB_URL` (separate from the REST `SUPABASE_URL`) - -#### Files to Create - -- `src/queue/client.ts` - pg-boss initialization - -- `src/queue/config.ts` - Queue configuration - -- `src/queue/types.ts` - Job payload types - -#### Example Implementation -```typescript -import PgBoss from 'pg-boss' - -const connectionString = process.env.SUPABASE_DB_URL - -if (!connectionString) { - throw new Error('Missing SUPABASE_DB_URL environment variable') -} - -export const queue = new PgBoss({ - connectionString, - retryLimit: 3, - retryDelay: 60, - retryBackoff: true, - expireInHours: 24, - archiveCompletedAfterSeconds: 3600, -}) - -export async function startQueue() { - await queue.start() - console.log('Job queue started') -} - -export async function stopQueue() { - await queue.stop() - console.log('Job queue stopped') -} - -export async function checkQueueHealth(): Promise<boolean> { - try { - await queue.getQueueSize('index-repo') - return true - } catch { - return false - } -} -``` - ---- - -### Issue #13: Implement job status tracking - -**Priority**: P1 (High) -**Depends on**: #12, #1 (needs `index_jobs` table) -**Blocks**: #14, #22 (job status API) - -#### Description -Track indexing job status in `index_jobs` table. Update status as jobs progress through queue. - -#### Acceptance Criteria - -- [ ] Create job record when queued (status: pending) - -- [ ] Update status when processing starts (status: processing) - -- [ ] Update status when completed (status: completed, with stats) - -- [ ] Update status when failed (status: failed, with error message) - -- [ ] Store job metadata: commit SHA, started/completed timestamps - -- [ ] Store job statistics: files processed, symbols extracted, etc. 
-- [ ] Link jobs to repositories
-
-#### Technical Notes
-- `index_jobs` table is source of truth (not pg-boss tables)
-- pg-boss job ID stored in `index_jobs.queue_job_id` for correlation
-- Frontend polls `index_jobs` for status updates
-
-#### Files to Create
-- `src/queue/job-tracker.ts` - Job status tracking functions
-
-#### Example Implementation
-```typescript
-export async function createIndexJob(
-  repositoryId: string,
-  commitSha: string
-): Promise<string> {
-  const { data, error } = await supabase
-    .from('index_jobs')
-    .insert({
-      repository_id: repositoryId,
-      status: 'pending',
-      commit_sha: commitSha,
-    })
-    .select()
-    .single()
-
-  if (error) throw error
-
-  // Queue the job with pg-boss and persist its ID for correlation
-  // (per the technical note: index_jobs.queue_job_id)
-  const queueJobId = await queue.send('index-repo', {
-    indexJobId: data.id,
-    repositoryId,
-    commitSha,
-  })
-
-  await supabase
-    .from('index_jobs')
-    .update({ queue_job_id: queueJobId })
-    .eq('id', data.id)
-
-  return data.id
-}
-
-export async function updateJobStatus(
-  jobId: string,
-  status: 'processing' | 'completed' | 'failed',
-  metadata?: {
-    error?: string
-    stats?: { filesProcessed: number; symbolsExtracted: number }
-  }
-) {
-  const updates: any = { status }
-
-  if (status === 'processing') {
-    updates.started_at = new Date().toISOString()
-  } else if (status === 'completed' || status === 'failed') {
-    updates.completed_at = new Date().toISOString()
-  }
-
-  if (metadata?.error) {
-    updates.error_message = metadata.error
-  }
-
-  if (metadata?.stats) {
-    updates.stats = metadata.stats
-  }
-
-  await supabase.from('index_jobs').update(updates).eq('id', jobId)
-}
-```
-
----
-
-### Issue #14: Build indexing worker
-
-**Priority**: P0 (Critical)
-**Depends on**: #11 (extraction pipeline), #13 (job tracking), #12 (queue)
-**Blocks**: Production indexing
-
-#### Description
-Implement worker that consumes `index-repo` jobs, orchestrates the full indexing pipeline, and handles errors gracefully.
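-
-None of the examples show the graceful-shutdown criterion from Issue #12; a minimal sketch of the process-signal wiring, assuming the `startQueue`/`stopQueue` helpers from that issue's example:
-
-```typescript
-// Graceful shutdown: stop polling and let in-flight jobs settle before exit.
-import { startQueue, stopQueue } from './client'
-
-await startQueue()
-
-for (const signal of ['SIGINT', 'SIGTERM'] as const) {
-  process.on(signal, async () => {
-    await stopQueue()
-    process.exit(0)
-  })
-}
-```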
-
-#### Acceptance Criteria
-- [ ] Worker consumes jobs from `index-repo` queue
-- [ ] Orchestrate full pipeline: clone → parse → extract → store
-- [ ] Update job status at each stage
-- [ ] Handle partial failures (e.g., some files fail to parse)
-- [ ] Store extracted data atomically (transaction)
-- [ ] Clean up temporary files after processing
-- [ ] Retry logic via pg-boss (automatic)
-- [ ] Log all worker activity with correlation IDs
-
-#### Technical Notes
-- Clone repos to temp directory (`/tmp/kotadb-/`)
-- Use GitHub App tokens for private repo access (from #16)
-- Parse all supported files (TS/JS/JSX/TSX)
-- Store symbols, references, dependencies in transaction
-- Clean up even if job fails
-
-#### Files to Create
-- `src/queue/workers/index-repo.ts` - Main indexing worker
-- `src/queue/workers/orchestrator.ts` - Pipeline orchestration
-
-#### Example Implementation
-```typescript
-import fs from 'node:fs/promises'
-
-export async function startIndexWorker() {
-  await queue.work('index-repo', { teamSize: 3, teamConcurrency: 1 }, async (job) => {
-    const { indexJobId, repositoryId, commitSha } = job.data
-    let repoPath: string | undefined
-
-    try {
-      await updateJobStatus(indexJobId, 'processing')
-
-      // Step 1: Clone repository
-      repoPath = await cloneRepository(repositoryId, commitSha)
-
-      // Step 2: Parse all files
-      const files = await discoverFiles(repoPath)
-      const parsedFiles = await parseFiles(files)
-
-      // Step 3: Extract symbols, references, dependencies
-      const symbols = await extractSymbols(parsedFiles)
-      const references = await extractReferences(parsedFiles)
-      const dependencies = await extractDependencies(parsedFiles, symbols, references)
-
-      // Step 4: Store in database (transaction)
-      await storeIndexedData(repositoryId, {
-        files: parsedFiles,
-        symbols,
-        references,
-        dependencies,
-      })
-
-      // Step 5: Update job status
-      await updateJobStatus(indexJobId, 'completed', {
-        stats: {
-          filesProcessed: parsedFiles.length,
-          symbolsExtracted: symbols.length,
-        },
-      })
-    } catch (error) {
-      await updateJobStatus(indexJobId, 'failed', {
-        error: (error as Error).message,
-      })
-      throw error // pg-boss will retry
-    } finally {
-      // Step 6: Clean up, even when the job fails (per acceptance criteria)
-      if (repoPath) {
-        await fs.rm(repoPath, { recursive: true, force: true })
-      }
-    }
-  })
-}
-```
-
----
-
-## Success Criteria
-
-- [ ] pg-boss queue is operational with Supabase
-- [ ] Jobs are tracked in `index_jobs` table
-- [ ] Worker processes repos end-to-end successfully
-- [ ] Failed jobs are retried automatically
-- [ ] Job status is visible to frontend via API
-- [ ] Worker logs are structured and queryable
-
-## Dependencies for Other Epics
-
-This epic enables:
-- Epic 5 (GitHub webhooks queue indexing jobs)
-- Epic 6 (REST API exposes job status)
-- Epic 7 (MCP tools query indexed data)
diff --git a/docs/vision/epic-5-github-integration.md b/docs/vision/epic-5-github-integration.md
deleted file mode 100644
index 6b177e9f..00000000
--- a/docs/vision/epic-5-github-integration.md
+++ /dev/null
@@ -1,309 +0,0 @@
-# Epic 5: GitHub Integration
-
-> **Reference Document**: This epic was from original planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis.
-
-**Status**: 🔴 0% Complete (**MVP BLOCKER**)
-**Priority**: Critical (Enables auto-indexing)
-**Estimated Duration**: 2 weeks
-**Actual Progress**: Not started. No GitHub App, no webhooks. **Third-highest priority gap for MVP.** Depends on Epic 4 (job queue).
-
-## Overview
-
-Integrate GitHub App for repository access and webhooks. Enable auto-indexing on every push to tracked repositories.
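-
-The webhook handlers in this epic type the payload as `any`; a narrow interface covering only the fields they actually read (the real GitHub payload is much larger) might look like:
-
-```typescript
-// Only the push-event fields used by Issues #17/#18; everything else is omitted.
-export interface PushEventPayload {
-  ref: string // e.g. "refs/heads/main"
-  after: string // head commit SHA after the push
-  repository: {
-    full_name: string // "owner/repo"
-    default_branch: string
-  }
-}
-```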
-
-## Issues
-
-### Issue #15: Document GitHub App setup
-
-**Priority**: P2 (Medium)
-**Depends on**: None (documentation only)
-**Blocks**: None (but needed for #16)
-
-#### Description
-Create comprehensive documentation for registering and configuring the KotaDB GitHub App.
-
-#### Acceptance Criteria
-- [ ] Step-by-step registration guide
-- [ ] Required permissions list
-- [ ] Webhook configuration instructions
-- [ ] Environment variables documentation
-- [ ] Development vs production app setup
-- [ ] Screenshots for clarity
-
-#### Required Permissions
-- **Repository permissions:**
-  - Contents: Read (clone repos)
-  - Metadata: Read (repo info)
-- **Account permissions:**
-  - None
-- **Events (webhooks):**
-  - Push
-
-#### Files to Create
-- `docs/github-app-setup.md` - Complete setup guide
-
-#### GitHub App Settings
-```
-Name: KotaDB (or KotaDB Dev for development)
-Homepage URL: https://kotadb.io
-Callback URL: https://app.kotadb.io/auth/github/callback
-Webhook URL: https://api.kotadb.io/webhooks/github
-Webhook secret: 
-
-Permissions:
-  - Repository contents: Read-only
-  - Repository metadata: Read-only
-
-Subscribe to events:
-  - Push
-
-Installation:
-  - Any account (for open-source) or Only this account (for private beta)
-```
-
-#### Environment Variables
-```
-GITHUB_APP_ID=123456
-GITHUB_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\n..."
-GITHUB_WEBHOOK_SECRET=
-```
-
----
-
-### Issue #16: GitHub App token generation
-
-**Priority**: P1 (High)
-**Depends on**: #15 (needs app credentials), #2 (Supabase client)
-**Blocks**: #14 (worker needs tokens to clone)
-
-#### Description
-Implement GitHub App installation token generation for accessing private repositories.
-
-#### Acceptance Criteria
-- [ ] Generate JWT for GitHub App authentication
-- [ ] Fetch installation access tokens
-- [ ] Cache tokens (expire after 55 min, refresh before expiry)
-- [ ] Handle token generation failures gracefully
-- [ ] Support multiple installations (different users/orgs)
-- [ ] Store `installation_id` in `repositories` table
-
-#### Technical Notes
-- Use `@octokit/rest` for GitHub API
-- Installation tokens valid for 1 hour
-- Cache in memory (regenerate on worker startup)
-- Private key from environment variable
-
-#### Files to Create
-- `src/github/app-auth.ts` - App authentication and token generation
-- `src/github/client.ts` - Octokit client factory
-
-#### Example Implementation
-```typescript
-import { App } from '@octokit/app'
-import { Octokit } from '@octokit/rest'
-
-const app = new App({
-  appId: process.env.GITHUB_APP_ID!,
-  privateKey: process.env.GITHUB_APP_PRIVATE_KEY!,
-})
-
-const tokenCache = new Map<number, { token: string; expiresAt: number }>()
-
-export async function getInstallationToken(installationId: number): Promise<string> {
-  const now = Date.now()
-  const cached = tokenCache.get(installationId)
-
-  // Return cached token if still valid (with 5 min buffer)
-  if (cached && cached.expiresAt > now + 5 * 60 * 1000) {
-    return cached.token
-  }
-
-  // Generate new token (Octokit wraps the result in a `data` envelope)
-  const { data } = await app.octokit.rest.apps.createInstallationAccessToken({
-    installation_id: installationId,
-  })
-
-  tokenCache.set(installationId, {
-    token: data.token,
-    expiresAt: new Date(data.expires_at).getTime(),
-  })
-
-  return data.token
-}
-
-export async function getOctokitForInstallation(installationId: number): Promise<Octokit> {
-  const token = await getInstallationToken(installationId)
-  return new Octokit({ auth: token })
-}
-```
-
----
-
-### Issue #17: Webhook receiver with verification
-
-**Priority**: P1 (High)
-**Depends on**: #5 (auth
middleware pattern), #15 (webhook secret)
-**Blocks**: #18
-
-#### Description
-Implement webhook endpoint that receives GitHub events, verifies signatures, and logs requests.
-
-#### Acceptance Criteria
-- [ ] POST /webhooks/github endpoint
-- [ ] Verify HMAC signature using webhook secret
-- [ ] Return 401 for invalid signatures
-- [ ] Parse push events
-- [ ] Log all webhook requests (headers + payload)
-- [ ] Return 200 for valid requests
-- [ ] Handle other event types gracefully (ignore for now)
-
-#### Technical Notes
-- Signature in `X-Hub-Signature-256` header
-- Verify with HMAC-SHA256 using webhook secret
-- Event type in `X-GitHub-Event` header
-- Only process `push` events initially
-
-#### Files to Create
-- `src/github/webhook-handler.ts` - Webhook verification and parsing
-- `src/api/webhooks.ts` - Webhook endpoint
-
-#### Example Implementation
-```typescript
-import crypto from 'crypto'
-
-export function verifyWebhookSignature(
-  payload: string,
-  signature: string,
-  secret: string
-): boolean {
-  const hmac = crypto.createHmac('sha256', secret)
-  const digest = 'sha256=' + hmac.update(payload).digest('hex')
-  const expected = Buffer.from(digest)
-  const received = Buffer.from(signature)
-
-  // timingSafeEqual throws when lengths differ, so guard first
-  return received.length === expected.length && crypto.timingSafeEqual(received, expected)
-}
-
-export async function handleWebhook(request: Request): Promise<Response> {
-  const signature = request.headers.get('x-hub-signature-256') ?? ''
-  const event = request.headers.get('x-github-event') ?? ''
-  const payload = await request.text()
-
-  if (!verifyWebhookSignature(payload, signature, process.env.GITHUB_WEBHOOK_SECRET!)) {
-    return new Response(JSON.stringify({ error: 'Invalid signature' }), {
-      status: 401,
-      headers: { 'content-type': 'application/json' },
-    })
-  }
-
-  console.log(`Received GitHub webhook: ${event}`)
-
-  if (event === 'push') {
-    const body = JSON.parse(payload)
-    const { repository, ref, after: commitSha } = body
-    // Process in #18
-  } else {
-    console.log(`Ignoring event type: ${event}`)
-  }
-
-  return new Response(JSON.stringify({ received: true }), {
-    status: 200,
-    headers: { 'content-type': 'application/json' },
-  })
-}
-```
-
----
-
-### Issue #18: Integrate webhooks with job queue
-
-**Priority**: P1 (High)
-**Depends on**: #17, #12 (queue), #13 (job tracking)
-**Blocks**: Auto-indexing
-
-#### Description
-Connect webhook receiver to job queue. Queue indexing jobs when repos are pushed.
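-
-Issue #17's example above leaves `// Process in #18` as a placeholder. Once `processPushEvent` exists (see the example below), the hand-off is a single call:
-
-```typescript
-// Inside handleWebhook, in place of the placeholder from Issue #17's example:
-if (event === 'push') {
-  const body = JSON.parse(payload)
-  await processPushEvent(body)
-}
-```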
- -#### Acceptance Criteria -- [ ] Parse push event payload -- [ ] Lookup repository in database by `full_name` -- [ ] Only queue job if repo is tracked by a user -- [ ] Extract commit SHA and ref (branch) -- [ ] Queue indexing job via `createIndexJob()` -- [ ] Update repository `last_push_at` timestamp -- [ ] Handle edge cases: force pushes, deleted branches -- [ ] Log all queued jobs - -#### Technical Notes -- Ignore pushes to untracked repositories (return 200, no action) -- Only index default branch or explicitly tracked branches -- Deduplicate: don't queue if job already pending for same commit - -#### Files to Create -- `src/github/webhook-processor.ts` - Webhook to queue bridge - -#### Example Implementation -```typescript -export async function processPushEvent(payload: any) { - const { repository, ref, after: commitSha } = payload - const fullName = repository.full_name // "owner/repo" - const branch = ref.replace('refs/heads/', '') - - // Lookup repository - const { data: repo, error } = await supabase - .from('repositories') - .select('*') - .eq('full_name', fullName) - .single() - - if (error || !repo) { - console.log(`Ignoring push to untracked repo: ${fullName}`) - return - } - - // Only index default branch (for now) - if (branch !== repo.default_branch) { - console.log(`Ignoring push to non-default branch: ${branch}`) - return - } - - // Check for existing pending job - const { data: existingJob } = await supabase - .from('index_jobs') - .select('id') - .eq('repository_id', repo.id) - .eq('commit_sha', commitSha) - .eq('status', 'pending') - .single() - - if (existingJob) { - console.log(`Job already queued for ${fullName}@${commitSha}`) - return - } - - // Queue new indexing job - const jobId = await createIndexJob(repo.id, commitSha) - console.log(`Queued indexing job ${jobId} for ${fullName}@${commitSha}`) - - // Update last push timestamp - await supabase - .from('repositories') - .update({ last_push_at: new Date().toISOString() }) - .eq('id', repo.id) -} -``` - ---- - -## Success Criteria - -- [ ] GitHub App is registered and configured -- [ ] Installation tokens are generated successfully -- [ ] Webhooks are received and verified -- [ ] Push events trigger indexing jobs -- [ ] Only tracked repositories are indexed -- [ ] Duplicate jobs are prevented - -## Dependencies for Other Epics - -This epic enables: -- Automatic indexing on push (core workflow) -- Epic 4 worker can clone private repos -- Epic 6 REST API can manage repo tracking diff --git a/docs/vision/epic-6-rest-api.md b/docs/vision/epic-6-rest-api.md deleted file mode 100644 index a818433d..00000000 --- a/docs/vision/epic-6-rest-api.md +++ /dev/null @@ -1,315 +0,0 @@ -# Epic 6: REST API Migration - -> **Reference Document**: This epic was from original planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis. - -**Status**: 🟡 70% Complete (Partial) -**Priority**: High (Frontend dependency) -**Estimated Duration**: 1-2 weeks -**Actual Progress**: Core endpoints working. Remaining: repository management, job status polling (blocked by Epic 4), organization management, pagination. - -## Overview - -Migrate existing REST API to Supabase, add repository management endpoints, create OpenAPI specification for frontend coordination. 
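-
-Several endpoints in this epic hand-roll `limit` parsing; a shared, clamped parser is one way to keep the behavior consistent (the default and cap here are assumptions, not taken from the spec):
-
-```typescript
-// Parse and clamp pagination params; defaults fall back on bad input.
-export function parsePagination(searchParams: URLSearchParams) {
-  const rawLimit = Number(searchParams.get('limit') ?? '20')
-  const rawOffset = Number(searchParams.get('offset') ?? '0')
-  const limit = Number.isNaN(rawLimit) ? 20 : Math.min(Math.max(rawLimit, 1), 100)
-  const offset = Number.isNaN(rawOffset) ? 0 : Math.max(rawOffset, 0)
-  return { limit, offset }
-}
-```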
-
-## Issues
-
-### Issue #19: Create OpenAPI specification
-
-**Priority**: P1 (High)
-**Depends on**: None (can start early)
-**Blocks**: Frontend integration
-
-#### Description
-Document all REST API endpoints in OpenAPI 3.0 format. Generate TypeScript types for frontend consumption.
-
-#### Acceptance Criteria
-- [ ] OpenAPI 3.0 spec covering all endpoints
-- [ ] Request/response schemas defined
-- [ ] Authentication requirements documented
-- [ ] Error responses documented (400, 401, 403, 404, 429, 500)
-- [ ] TypeScript types generated via `openapi-typescript`
-- [ ] Hosted spec for frontend access (in git or endpoint)
-
-#### Technical Notes
-- Use OpenAPI 3.0 or 3.1
-- Store spec in `docs/openapi.yaml`
-- CI validates implementation matches spec (future)
-- Frontend runs `openapi-typescript docs/openapi.yaml -o types/api.ts`
-
-#### Files to Create
-- `docs/openapi.yaml` - Complete OpenAPI specification
-- `scripts/generate-types.sh` - Type generation script
-
-#### Endpoints to Document
-```yaml
-paths:
-  /health:
-    get: # Health check
-  /api/search:
-    get: # Search indexed files
-  /api/files/recent:
-    get: # Recent indexed files
-  /api/repositories:
-    get: # List user's repositories
-    post: # Add repository to track
-  /api/repositories/{id}:
-    get: # Get repository details
-    patch: # Update repository settings
-    delete: # Stop tracking repository
-  /api/jobs:
-    get: # List indexing jobs
-  /api/jobs/{id}:
-    get: # Get job status
-```
-
----
-
-### Issue #20: Migrate existing endpoints to Supabase
-
-**Priority**: P0 (Critical)
-**Depends on**: #2 (Supabase client), #5 (auth), #11 (indexed data)
-**Blocks**: Frontend UX
-
-#### Description
-Migrate `/search`, `/files/recent`, and `/index` endpoints to use Supabase instead of SQLite.
-
-#### Acceptance Criteria
-- [ ] GET /search queries `indexed_files` with full-text search
-- [ ] GET /files/recent queries `indexed_files` ordered by `indexed_at`
-- [ ] POST /index creates repository and queues job
-- [ ] All endpoints require authentication
-- [ ] Responses match OpenAPI spec
-- [ ] Maintain backward compatibility during transition
-
-#### Technical Notes
-- Use Supabase `.textSearch()` for full-text search
-- Apply RLS automatically via authenticated client
-- Filter by `user_id` implicitly (RLS handles it)
-- Pagination via `limit` and `offset` parameters
-
-#### Files to Update
-- `src/api/queries.ts` - Query functions
-- `src/api/routes.ts` - Route handlers
-
-#### Example: Search Endpoint
-```typescript
-// GET /api/search?term=foo&limit=20
-export async function searchCode(
-  request: Request,
-  context: AuthContext
-): Promise<Response> {
-  const { searchParams } = new URL(request.url)
-  const term = searchParams.get('term')
-  const limit = Number(searchParams.get('limit') ??
'20')
-  const project = searchParams.get('project')
-
-  if (!term) {
-    return new Response(JSON.stringify({ error: 'Missing term parameter' }), {
-      status: 400,
-      headers: { 'content-type': 'application/json' },
-    })
-  }
-
-  let query = supabase
-    .from('indexed_files')
-    .select('id, path, content, language, repository_id, repositories(full_name)')
-    .textSearch('content', term)
-    .limit(limit)
-
-  if (project) {
-    query = query.eq('repositories.full_name', project)
-  }
-
-  const { data, error } = await query
-
-  if (error) {
-    return new Response(JSON.stringify({ error: error.message }), {
-      status: 500,
-      headers: { 'content-type': 'application/json' },
-    })
-  }
-
-  return new Response(JSON.stringify({ results: data }), {
-    status: 200,
-    headers: { 'content-type': 'application/json' },
-  })
-}
-```
-
----
-
-### Issue #21: Add repository management endpoints
-
-**Priority**: P1 (High)
-**Depends on**: #5 (auth), #16 (GitHub tokens)
-**Blocks**: Frontend repo selection
-
-#### Description
-Build endpoints for adding, listing, updating, and removing tracked repositories.
-
-#### Acceptance Criteria
-- [ ] POST /api/repositories adds new repository
-  - Validates user has access via GitHub App
-  - Stores `installation_id` and `full_name`
-  - Triggers initial indexing job
-- [ ] GET /api/repositories lists user's repos with status
-- [ ] GET /api/repositories/:id returns details with latest job
-- [ ] PATCH /api/repositories/:id updates settings (e.g., branch to index)
-- [ ] DELETE /api/repositories/:id stops tracking (soft delete or hard?)
-- [ ] All endpoints enforce user ownership (RLS)
-
-#### Technical Notes
-- Verify user access via GitHub API before adding repo
-- Store GitHub `installation_id` for token generation
-- Initial index job queued on POST
-
-#### Files to Create
-- `src/api/repositories.ts` - Repository management handlers
-
-#### Example: Add Repository
-```typescript
-// POST /api/repositories
-// Body: { fullName: "owner/repo", installationId: 12345 }
-export async function addRepository(
-  request: Request,
-  context: AuthContext
-): Promise<Response> {
-  const body = await request.json()
-  const { fullName, installationId } = body
-
-  if (!fullName || !installationId) {
-    return new Response(JSON.stringify({ error: 'Missing fullName or installationId' }), {
-      status: 400,
-      headers: { 'content-type': 'application/json' },
-    })
-  }
-
-  const [owner, repoName] = fullName.split('/')
-
-  // Verify user has access via GitHub App (octokit throws on missing repos,
-  // so the 404 path needs a try/catch rather than a falsy check)
-  const octokit = await getOctokitForInstallation(installationId)
-  let repo
-  try {
-    const response = await octokit.rest.repos.get({ owner, repo: repoName })
-    repo = response.data
-  } catch {
-    return new Response(JSON.stringify({ error: 'Repository not found or no access' }), {
-      status: 404,
-      headers: { 'content-type': 'application/json' },
-    })
-  }
-
-  const { data: dbRepo, error } = await supabase
-    .from('repositories')
-    .insert({
-      user_id: context.userId,
-      full_name: fullName,
-      installation_id: installationId,
-      default_branch: repo.default_branch,
-    })
-    .select()
-    .single()
-
-  if (error || !dbRepo) {
-    return new Response(JSON.stringify({ error: error?.message ??
'Failed to persist repository' }), {
-      status: 400,
-      headers: { 'content-type': 'application/json' },
-    })
-  }
-
-  // Queue initial indexing job against the default branch's head commit
-  // (createIndexJob expects a commit SHA, not a branch name - see #13)
-  const { data: branchInfo } = await octokit.rest.repos.getBranch({
-    owner,
-    repo: repoName,
-    branch: repo.default_branch,
-  })
-  const jobId = await createIndexJob(dbRepo.id, branchInfo.commit.sha)
-
-  return new Response(JSON.stringify({ repository: dbRepo, jobId }), {
-    status: 201,
-    headers: { 'content-type': 'application/json' },
-  })
-}
-```
-
----
-
-### Issue #22: Add job status polling endpoints
-
-**Priority**: P1 (High)
-**Depends on**: #13 (job tracking)
-**Blocks**: Frontend status UX
-
-#### Description
-Expose indexing job status for frontend polling and progress displays.
-
-#### Acceptance Criteria
-- [ ] GET /api/jobs lists jobs for user's repositories
-  - Filter by repository, status
-  - Paginate results
-  - Include job metadata and stats
-- [ ] GET /api/jobs/:id returns single job details
-  - Include progress, logs, errors
-  - Include repository info
-- [ ] Both endpoints enforce user ownership (RLS)
-
-#### Technical Notes
-- Join `index_jobs` with `repositories` to filter by user
-- Return job stats (`filesProcessed`, `symbolsExtracted`)
-- Frontend polls every 5 seconds while jobs are pending/processing
-
-#### Files to Create
-- `src/api/jobs.ts` - Job status handlers
-
-#### Example: List Jobs
-```typescript
-// GET /api/jobs?repository_id=uuid&status=processing
-export async function listJobs(
-  request: Request,
-  context: AuthContext
-): Promise<Response> {
-  const { searchParams } = new URL(request.url)
-  const repositoryId = searchParams.get('repository_id')
-  const status = searchParams.get('status')
-  const limit = Number(searchParams.get('limit') ?? '50')
-
-  let query = supabase
-    .from('index_jobs')
-    .select('*, repositories!inner(*)')
-    .eq('repositories.user_id', context.userId)
-    .order('created_at', { ascending: false })
-    .limit(limit)
-
-  if (repositoryId) {
-    query = query.eq('repository_id', repositoryId)
-  }
-
-  if (status) {
-    query = query.eq('status', status)
-  }
-
-  const { data, error } = await query
-
-  if (error) {
-    return new Response(JSON.stringify({ error: error.message }), {
-      status: 500,
-      headers: { 'content-type': 'application/json' },
-    })
-  }
-
-  return new Response(JSON.stringify({ jobs: data }), {
-    status: 200,
-    headers: { 'content-type': 'application/json' },
-  })
-}
-```
-
----
-
-## Success Criteria
-
-- [ ] OpenAPI spec is complete and accurate
-- [ ] All existing endpoints migrated to Supabase
-- [ ] Repository management endpoints functional
-- [ ] Job status endpoints provide real-time visibility
-- [ ] Frontend can generate types from OpenAPI spec
-- [ ] All endpoints protected by authentication and RLS
-
-## Dependencies for Other Epics
-
-This epic enables:
-- Frontend repository selection and status tracking
-- User onboarding flow (add repo → index → query)
diff --git a/docs/vision/epic-7-mcp-server.md b/docs/vision/epic-7-mcp-server.md
deleted file mode 100644
index 70aa966c..00000000
--- a/docs/vision/epic-7-mcp-server.md
+++ /dev/null
@@ -1,466 +0,0 @@
-# Epic 7: MCP Server Implementation
-
-> **Reference Document**: This epic was from original planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis.
-
-**Status**: ✅ 98% Complete (Production-Ready)
-**Priority**: Critical (Core value proposition)
-**Estimated Duration**: 2 weeks
-**Actual Duration**: ~3 weeks (completed October 2025)
-
-**Completion Summary**: HTTP JSON-RPC implementation complete (using `@modelcontextprotocol/sdk` v1.20+).
**Four tools working**: `search_code`, `index_repository`, `list_recent_files`, **`search_dependencies`** (#116, added 2025-10-20). 122/132 MCP tests passing (92.4% coverage). **Technical decision**: HTTP JSON-RPC instead of SSE for simpler error handling.
-
-**Remaining Work**: `find_references` tool (requires symbol resolution from Epic 3, ~1 week).
-
-## Overview
-
-Implement Model Context Protocol (MCP) server with SSE transport. Build three MVP tools: `search_code`, `find_references`, `get_dependencies`.
-
-## Current Status
-
-**Completion**: 98% (updated 2025-10-20)
-**Blockers**: None (production-ready)
-
-### Completed (as of 2025-10-20)
-- ✅ HTTP JSON-RPC transport (using `@modelcontextprotocol/sdk` v1.20+)
-- ✅ Per-request server isolation (stateless design)
-- ✅ Authentication integration (API keys + rate limiting)
-- ✅ Four MCP tools operational:
-  - `search_code` - Full-text search across indexed files
-  - `index_repository` - Trigger repository indexing
-  - `list_recent_files` - Query recently indexed files
-  - **`search_dependencies`** (#116, merged PR #229) - NEW
-    - Three search directions: dependents (reverse), dependencies (forward), both
-    - Recursive traversal with depth 1-5
-    - Circular dependency detection
-    - Test file filtering
-- ✅ 122/132 MCP tests passing (92.4% coverage)
-- ✅ Integration guide (`docs/guides/mcp-claude-code-integration.md`)
-
-### In Progress
-- None
-
-### Remaining Work (2%)
-- `find_references` tool - requires symbol resolution (~1 week, Epic 3 dependency)
-- Advanced tools (future): `analyze_impact`, `get_type_hierarchy`
-
-## Issues
-
-### Issue #23: Implement SSE transport layer
-
-**Priority**: P0 (Critical)
-**Depends on**: #5 (auth middleware)
-**Blocks**: #24, #25, #26, #27
-
-#### Description
-Implement Server-Sent Events (SSE) transport for MCP protocol per specification. Handle connection lifecycle and event streaming.
-
-#### Acceptance Criteria
-- [ ] GET /mcp/ endpoint with SSE streaming
-- [ ] Authentication via API key in query param or header
-- [ ] Connection lifecycle: open → stream events → close
-- [ ] Heartbeat events every 30 seconds
-- [ ] Proper SSE formatting (`event:`, `data:`, `id:`)
-- [ ] Handle client disconnections gracefully
-- [ ] Support concurrent connections per user
-
-#### Technical Notes
-- SSE spec: https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events
-- MCP SSE transport: https://modelcontextprotocol.io/specification/2025-06-18/basic/transports
-- Headers: `Content-Type: text/event-stream`, `Cache-Control: no-cache`, `Connection: keep-alive`
-- Send newline after each event block
-
-#### Files to Create
-- `src/mcp/transport/sse.ts` - SSE transport implementation
-- `src/mcp/transport/connection.ts` - Connection management
-
-#### Example Implementation
-```typescript
-const encoder = new TextEncoder()
-
-export async function handleMcpSseConnection(request: Request): Promise<Response> {
-  const url = new URL(request.url)
-  const bearer = request.headers.get('authorization')
-  const apiKey = url.searchParams.get('apiKey') ?? bearer?.replace('Bearer ', '') ??
''
-  const auth = await validateApiKey(apiKey)
-
-  if (!auth) {
-    return new Response(JSON.stringify({ error: 'Invalid API key' }), {
-      status: 401,
-      headers: { 'content-type': 'application/json' },
-    })
-  }
-
-  const stream = new ReadableStream({
-    start(controller) {
-      controller.enqueue(encoder.encode(':connected\n\n'))
-
-      const heartbeat = setInterval(() => {
-        controller.enqueue(encoder.encode(':heartbeat\n\n'))
-      }, 30_000)
-
-      const close = () => {
-        clearInterval(heartbeat)
-        controller.close()
-      }
-
-      request.signal.addEventListener('abort', close)
-
-      // TODO: wire MCP message pump (#24)
-    },
-  })
-
-  return new Response(stream, {
-    headers: {
-      'Content-Type': 'text/event-stream',
-      'Cache-Control': 'no-cache',
-      Connection: 'keep-alive',
-      'X-Accel-Buffering': 'no',
-    },
-  })
-}
-```
-
----
-
-### Issue #24: MCP protocol handlers
-
-**Priority**: P0 (Critical)
-**Depends on**: #23
-**Blocks**: #25, #26, #27
-
-#### Description
-Implement MCP protocol message handling: initialization, tool discovery, tool execution, error handling.
-
-#### Acceptance Criteria
-- [ ] Handle `initialize` request (handshake)
-- [ ] Handle `tools/list` request (tool discovery)
-- [ ] Handle `tools/call` request (tool execution)
-- [ ] Return proper MCP message format (JSON-RPC style)
-- [ ] Include request IDs for correlation
-- [ ] Handle invalid requests with error responses
-- [ ] Log all MCP interactions
-
-#### Technical Notes
-- MCP spec: https://modelcontextprotocol.io/specification/2025-06-18/basic/messages
-- Messages are JSON objects sent as SSE `data:` payloads
-- Each message has `jsonrpc: "2.0"`, `id`, `method`, `params`
-- Responses have `jsonrpc: "2.0"`, `id`, `result` (or `error`)
-
-#### Files to Create
-- `src/mcp/protocol/handler.ts` - Protocol message router
-- `src/mcp/protocol/types.ts` - MCP message types
-- `src/mcp/protocol/messages.ts` - Message builders
-
-#### Example Implementation
-```typescript
-export async function handleMcpMessage(message: any, userId: string): Promise<any> {
-  const { jsonrpc, id, method, params } = message
-
-  if (jsonrpc !== '2.0') {
-    return buildError(id, -32600, 'Invalid JSON-RPC version')
-  }
-
-  switch (method) {
-    case 'initialize':
-      return buildResult(id, {
-        protocolVersion: '2025-06-18',
-        serverInfo: {
-          name: 'KotaDB',
-          version: '0.1.0',
-        },
-        capabilities: {
-          tools: {},
-        },
-      })
-
-    case 'tools/list':
-      return buildResult(id, {
-        tools: [
-          {
-            name: 'search_code',
-            description: 'Search for code across indexed repositories',
-            inputSchema: {
-              type: 'object',
-              properties: {
-                query: { type: 'string' },
-                repository: { type: 'string' },
-                limit: { type: 'number', default: 20 },
-              },
-              required: ['query'],
-            },
-          },
-          // Additional tools from #25-27
-        ],
-      })
-
-    case 'tools/call':
-      return await executeTool(id, params, userId)
-
-    default:
-      return buildError(id, -32601, `Method not found: ${method}`)
-  }
-}
-
-function buildResult(id: any, result: any) {
-  return { jsonrpc: '2.0', id, result }
-}
-
-function buildError(id: any, code: number, message: string) {
-  return { jsonrpc: '2.0', id, error: { code, message } }
-}
-```
-
----
-
-### Issue #25: Implement search_code tool
-
-**Priority**: P1 (High)
-**Depends on**: #24, #20 (search query logic)
-**Blocks**: MVP launch
-
-#### Description
-Implement `search_code` MCP tool for full-text search across indexed files.
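-
-The example implementation below calls an `extractSnippet` helper that is never defined; one possible sketch, returning about five lines of context around the first match:
-
-```typescript
-// A possible extractSnippet: ~5 lines of context centered on the first match.
-export function extractSnippet(content: string, query: string, contextLines = 2): string {
-  const lines = content.split('\n')
-  const needle = query.toLowerCase()
-  const hit = lines.findIndex((line) => line.toLowerCase().includes(needle))
-  if (hit === -1) return lines.slice(0, contextLines * 2 + 1).join('\n')
-  const start = Math.max(0, hit - contextLines)
-  return lines.slice(start, hit + contextLines + 1).join('\n')
-}
-```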
- -#### Acceptance Criteria -- [ ] Accept parameters: `query` (required), `repository` (optional), `limit` (optional) -- [ ] Query `indexed_files` with full-text search -- [ ] Filter by repository if specified -- [ ] Return results with context snippets -- [ ] Format results for LLM consumption (concise, structured) -- [ ] Enforce user's RLS (only search their repos) -- [ ] Handle errors gracefully - -#### Technical Notes -- Reuse query logic from REST `/search` endpoint -- Return file path, line numbers, and surrounding context -- Limit to 20 results by default, max 100 - -#### Files to Create -- `src/mcp/tools/search-code.ts` - Search tool implementation - -#### Example Implementation -```typescript -export async function searchCode(params: any, userId: string) { - const { query, repository, limit = 20 } = params - - if (!query || typeof query !== 'string') { - throw new Error('Parameter "query" is required and must be a string') - } - - let dbQuery = supabase - .from('indexed_files') - .select('path, content, language, repositories(full_name)') - .textSearch('content', query) - .limit(Math.min(limit, 100)) - - if (repository) { - dbQuery = dbQuery.eq('repositories.full_name', repository) - } - - const { data, error } = await dbQuery - - if (error) throw error - - // Format for LLM - const results = data.map((file) => ({ - repository: file.repositories.full_name, - path: file.path, - language: file.language, - snippet: extractSnippet(file.content, query), // 5 lines of context - })) - - return { - content: [ - { - type: 'text', - text: formatSearchResults(results), - }, - ], - } -} - -function formatSearchResults(results: any[]): string { - if (results.length === 0) { - return 'No results found.' - } - - return results.map((r, i) => - `${i + 1}. ${r.repository}/${r.path} (${r.language})\n${r.snippet}` - ).join('\n\n') -} -``` - ---- - -### Issue #26: Implement find_references tool - -**Priority**: P1 (High) -**Depends on**: #24, #10 (references extracted) -**Blocks**: MVP launch - -#### Description -Implement `find_references` MCP tool to find all locations where a symbol is used. 
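-
-The example below leans on an undefined `groupByFile` helper; a possible shape, assuming the row layout produced by the Supabase query in that example:
-
-```typescript
-// Buckets reference rows by source file, matching the query's join shape.
-interface ReferenceRow {
-  caller_line: number
-  reference_type: string
-  indexed_files: { path: string }
-}
-
-export function groupByFile(rows: ReferenceRow[]) {
-  const groups = new Map<string, ReferenceRow[]>()
-  for (const row of rows) {
-    const file = row.indexed_files.path
-    groups.set(file, [...(groups.get(file) ?? []), row])
-  }
-  return [...groups.entries()].map(([file, references]) => ({ file, references }))
-}
-```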
- -#### Acceptance Criteria -- [ ] Accept parameters: `symbol` (required), `repository` (optional) -- [ ] Query `references` table for symbol name -- [ ] Join with `symbols` to get definition location -- [ ] Join with `indexed_files` to get file paths -- [ ] Return list of references with file, line, and context -- [ ] Format as "what will break if I change this" narrative -- [ ] Enforce user's RLS - -#### Technical Notes -- Symbol lookup is case-sensitive (or configurable) -- Include reference type (import, call, property access) -- Group by file for readability - -#### Files to Create -- `src/mcp/tools/find-references.ts` - Find references tool - -#### Example Implementation -```typescript -export async function findReferences(params: any, userId: string) { - const { symbol, repository } = params - - if (!symbol || typeof symbol !== 'string') { - throw new Error('Parameter "symbol" is required and must be a string') - } - - let query = supabase - .from('references') - .select(` - id, - caller_line, - reference_type, - indexed_files!caller_file_id(path, content), - symbols!symbol_id(name, file_id, line_start), - repositories(full_name) - `) - .eq('symbols.name', symbol) - - if (repository) { - query = query.eq('repositories.full_name', repository) - } - - const { data, error } = await query - - if (error) throw error - - // Format for LLM - const grouped = groupByFile(data) - const summary = `Symbol "${symbol}" is referenced in ${grouped.length} file(s):` - - const details = grouped.map((group) => { - const refs = group.references.map((r) => - ` Line ${r.caller_line}: ${r.reference_type}` - ).join('\n') - - return `${group.file}\n${refs}` - }).join('\n\n') - - return { - content: [ - { - type: 'text', - text: `${summary}\n\n${details}`, - }, - ], - } -} -``` - ---- - -### Issue #27: Implement get_dependencies tool - -**Priority**: P1 (High) -**Depends on**: #24, #11 (dependencies extracted) -**Blocks**: MVP launch - -#### Description -Implement `get_dependencies` MCP tool to build dependency graph for a file or symbol. 
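-
-The example below assumes `buildDependencyTree` and `formatTree` helpers; a minimal node shape and tree formatter (the names and structure are assumptions) could be:
-
-```typescript
-// Assumed tree node produced by buildDependencyTree and consumed by formatTree.
-interface DependencyNode {
-  name: string
-  children: DependencyNode[]
-}
-
-export function formatTree(node: DependencyNode, depth = 0): string {
-  const prefix = depth > 0 ? `${'  '.repeat(depth)}└─ ` : ''
-  const lines = [prefix + node.name]
-  for (const child of node.children) {
-    lines.push(formatTree(child, depth + 1))
-  }
-  return lines.join('\n')
-}
-```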
-
-#### Acceptance Criteria
-- [ ] Accept parameters: `path` or `symbol` (one required), `repository` (optional), `recursive` (optional)
-- [ ] Query `dependencies` table
-- [ ] Build dependency tree (what this imports)
-- [ ] Support recursive traversal (dependencies of dependencies)
-- [ ] Detect and report circular dependencies
-- [ ] Format as tree or list for LLM
-- [ ] Enforce user's RLS
-
-#### Technical Notes
-- Limit recursion depth to 5 to prevent infinite loops
-- Return both file-level and symbol-level dependencies
-- Include import paths for clarity
-
-#### Files to Create
-- `src/mcp/tools/get-dependencies.ts` - Dependency graph tool
-
-#### Example Implementation
-```typescript
-export async function getDependencies(params: any, userId: string) {
-  const { path, symbol, repository, recursive = false } = params
-
-  if (!path && !symbol) {
-    throw new Error('Either "path" or "symbol" parameter is required')
-  }
-
-  let dependencies: Dependency[] = []
-
-  if (path) {
-    dependencies = await getFileDependencies(path, repository, recursive)
-  } else {
-    dependencies = await getSymbolDependencies(symbol, repository, recursive)
-  }
-
-  // Format as tree
-  const tree = buildDependencyTree(dependencies)
-  const circular = detectCircular(dependencies)
-
-  let result = `Dependencies:\n${formatTree(tree)}`
-
-  if (circular.length > 0) {
-    result += `\n\nCircular dependencies detected:\n${formatCircular(circular)}`
-  }
-
-  return {
-    content: [
-      {
-        type: 'text',
-        text: result,
-      },
-    ],
-  }
-}
-
-async function getFileDependencies(
-  path: string,
-  repository: string | undefined,
-  recursive: boolean
-): Promise<Dependency[]> {
-  // Query dependencies table, optionally recurse
-}
-```
-
----
-
-## Success Criteria
-
-- [ ] SSE connection established and maintained
-- [ ] MCP protocol handshake successful
-- [ ] Tool discovery returns all three tools
-- [ ] All three tools execute and return correct results
-- [ ] Claude Code can connect and query successfully
-- [ ] Error handling is robust and informative
-
-## Dependencies for Other Epics
-
-This epic is the culmination of:
-- Epic 1 (database schema)
-- Epic 2 (authentication)
-- Epic 3 (extracted data)
-- Epic 4 (indexed repositories)
-
-This is the primary user-facing API for CLI agents.
diff --git a/docs/vision/epic-8-monitoring.md b/docs/vision/epic-8-monitoring.md
deleted file mode 100644
index 8f262546..00000000
--- a/docs/vision/epic-8-monitoring.md
+++ /dev/null
@@ -1,285 +0,0 @@
-# Epic 8: Monitoring & Operations
-
-> **Reference Document**: This epic was from original planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis.
-
-**Status**: 🟡 15% Complete (Minimal Progress)
-**Priority**: Medium (Launch readiness)
-**Estimated Duration**: 1 week
-**Actual Progress**: Basic `/health` endpoint exists. Remaining: structured logging, metrics, alerts.
-
-## Overview
-
-Implement structured logging, health checks, and monitoring configuration. Use built-in tools (Fly.io, Supabase) to avoid new dependencies.
-
-## Issues
-
-### Issue #28: Set up structured logging with bun:logger
-
-**Priority**: P1 (High)
-**Depends on**: None (can start early)
-**Blocks**: Debugging and observability
-
-#### Description
-Implement JSON-formatted structured logging with correlation IDs throughout the application.
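-
-For wiring, a sketch of how the `withRequestLogging` helper from the example below might hook into the Bun server (`routeRequest` is a hypothetical application router, not defined in this epic):
-
-```typescript
-import { withRequestLogging } from './logging/logger'
-
-declare function routeRequest(request: Request): Promise<Response> // assumed app router
-
-Bun.serve({
-  port: Number(process.env.PORT ?? '3000'),
-  fetch: (request) =>
-    withRequestLogging(request, async (logger) => {
-      logger.debug('Routing request')
-      return routeRequest(request)
-    }),
-})
-```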
- -#### Acceptance Criteria -- [ ] JSON log format to stdout/stderr -- [ ] Correlation IDs: `request_id`, `user_id`, `job_id` -- [ ] Log levels: debug, info, warn, error -- [ ] Request/response logging middleware -- [ ] Error logging with stack traces -- [ ] Configurable log level via environment variable -- [ ] No PII in logs (mask sensitive data) - -#### Technical Notes -- Use Bun's built-in logger (lightweight, zero dependencies) -- Attach correlation ID to all log entries in a request -- Fly.io captures stdout/stderr automatically -- Query with `flyctl logs --app kotadb-prod` - -#### Files to Create -- `src/logging/logger.ts` - Logger configuration -- `src/logging/middleware.ts` - Request logging middleware -- `src/logging/correlation.ts` - Correlation ID management - -#### Example Implementation -```typescript -import { randomUUID } from 'crypto' - -export interface LogContext { - requestId?: string - userId?: string - jobId?: string - [key: string]: any -} - -export function createLogger(context: LogContext = {}) { - return { - debug: (message: string, meta?: any) => log('debug', message, { ...context, ...meta }), - info: (message: string, meta?: any) => log('info', message, { ...context, ...meta }), - warn: (message: string, meta?: any) => log('warn', message, { ...context, ...meta }), - error: (message: string, error?: Error, meta?: any) => { - log('error', message, { - ...context, - ...meta, - error: error?.message, - stack: error?.stack, - }) - }, - } -} - -function log(level: string, message: string, meta: any) { - const logEntry = { - timestamp: new Date().toISOString(), - level, - message, - ...meta, - } - - if (level === 'error') { - console.error(JSON.stringify(logEntry)) - } else { - console.log(JSON.stringify(logEntry)) - } -} - -// Helper: wrap fetch handlers with logging -export async function withRequestLogging( - request: Request, - handler: (logger: ReturnType) => Promise, - context: { userId?: string } = {} -): Promise { - const requestId = randomUUID() - const startedAt = Date.now() - const logger = createLogger({ requestId, ...context }) - - logger.info('Incoming request', { - method: request.method, - url: request.url, - }) - - try { - const response = await handler(logger) - logger.info('Request completed', { - statusCode: response.status, - durationMs: Date.now() - startedAt, - }) - return response - } catch (error) { - logger.error('Request failed', error as Error, { - durationMs: Date.now() - startedAt, - }) - throw error - } -} -``` - ---- - -### Issue #29: Enhanced health check endpoint - -**Priority**: P1 (High) -**Depends on**: #2 (database), #12 (queue), #16 (GitHub) -**Blocks**: Deployment - -#### Description -Build comprehensive health check endpoint that verifies all critical services. 
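-
-The technical notes below call for caching check results for about 10 seconds; a minimal cache around the `healthCheck` handler from the example might be:
-
-```typescript
-// ~10s TTL cache over healthCheck; the body is cached as text because a
-// Response body can only be read once.
-let cached: { body: string; status: number; expiresAt: number } | null = null
-
-export async function cachedHealthCheck(): Promise<Response> {
-  const now = Date.now()
-  if (!cached || cached.expiresAt <= now) {
-    const response = await healthCheck()
-    cached = { body: await response.text(), status: response.status, expiresAt: now + 10_000 }
-  }
-  return new Response(cached.body, {
-    status: cached.status,
-    headers: { 'content-type': 'application/json' },
-  })
-}
-```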
-
-#### Acceptance Criteria
-- [ ] GET /health returns 200 if all checks pass
-- [ ] Returns 503 if any check fails
-- [ ] Check database connection
-- [ ] Check job queue health
-- [ ] Check GitHub API connectivity (optional)
-- [ ] Return detailed status for debugging
-- [ ] Fly.io polls this endpoint for instance health
-
-#### Technical Notes
-- Keep checks fast (< 500ms total)
-- Cache check results briefly (10 seconds)
-- Don't expose sensitive information in public endpoint
-- Use separate `/health/detailed` for admin access
-
-#### Files to Create
-- `src/api/health.ts` - Health check handlers
-
-#### Example Implementation
-```typescript
-export async function healthCheck(): Promise<Response> {
-  const checks = {
-    database: false,
-    queue: false,
-    github: false,
-  }
-
-  try {
-    checks.database = await checkDatabaseHealth()
-  } catch (error) {
-    console.error('Database health check failed:', error)
-  }
-
-  try {
-    checks.queue = await checkQueueHealth()
-  } catch (error) {
-    console.error('Queue health check failed:', error)
-  }
-
-  try {
-    checks.github = await checkGitHubHealth()
-  } catch (error) {
-    console.warn('GitHub health check failed:', error)
-  }
-
-  const allHealthy = checks.database && checks.queue
-  const statusCode = allHealthy ? 200 : 503
-
-  return new Response(
-    JSON.stringify({
-      status: allHealthy ? 'healthy' : 'unhealthy',
-      checks,
-      timestamp: new Date().toISOString(),
-    }),
-    {
-      status: statusCode,
-      headers: { 'content-type': 'application/json' },
-    }
-  )
-}
-
-async function checkDatabaseHealth(): Promise<boolean> {
-  const { error } = await supabase.from('repositories').select('count').limit(1)
-  return !error
-}
-
-async function checkQueueHealth(): Promise<boolean> {
-  try {
-    await queue.getQueueSize('index-repo')
-    return true
-  } catch {
-    return false
-  }
-}
-
-async function checkGitHubHealth(): Promise<boolean> {
-  try {
-    // The root meta endpoint needs no auth (an App private key is not a token)
-    const octokit = new Octokit()
-    await octokit.rest.meta.root()
-    return true
-  } catch {
-    return false
-  }
-}
-```
-
----
-
-### Issue #30: Configure Fly.io metrics and alerts
-
-**Priority**: P2 (Medium)
-**Depends on**: #29 (health check)
-**Blocks**: Production monitoring
-
-#### Description
-Set up Fly.io metrics dashboard and configure alerts for critical failures.
- -#### Acceptance Criteria -- [ ] Fly.io metrics enabled in dashboard -- [ ] Health check configured (polls `/health` every 30s) -- [ ] Alerts configured for: - - Instance downtime (restarts) - - High error rate (> 5% 5xx responses) - - High latency (p95 > 2 seconds) - - Failed health checks -- [ ] Alert destinations configured (email, Slack, or webhook) -- [ ] Documentation for viewing metrics - -#### Technical Notes -- Fly.io dashboard: https://fly.io/apps/kotadb-prod/metrics -- Configure via `fly.toml` or Fly.io UI -- Metrics retained for 30 days (free tier) - -#### Files to Update -- `fly.toml` - Health check configuration -- `docs/monitoring.md` - Monitoring guide - -#### Example fly.toml -```toml -[http_service] - internal_port = 3000 - force_https = true - auto_stop_machines = true - auto_start_machines = true - min_machines_running = 1 - -[checks] - [checks.health] - grace_period = "10s" - interval = "30s" - method = "GET" - path = "/health" - timeout = "5s" - type = "http" - -[metrics] - port = 9091 - path = "/metrics" -``` - ---- - -## Success Criteria - -- [ ] Structured logs are queryable with `flyctl logs` -- [ ] Correlation IDs trace requests end-to-end -- [ ] Health check accurately reflects service status -- [ ] Fly.io restarts unhealthy instances automatically -- [ ] Alerts notify team of critical issues -- [ ] Metrics dashboard shows latency, errors, throughput - -## Dependencies for Other Epics - -This epic supports: -- Debugging issues in all other epics -- Production deployment (Epic 9) -- Operational visibility diff --git a/docs/vision/epic-9-cicd-deployment.md b/docs/vision/epic-9-cicd-deployment.md deleted file mode 100644 index 4a192df2..00000000 --- a/docs/vision/epic-9-cicd-deployment.md +++ /dev/null @@ -1,289 +0,0 @@ -# Epic 9: CI/CD & Deployment - -> **Reference Document**: This epic was from original planning. See [ROADMAP.md](./ROADMAP.md) for current priorities and [CURRENT_STATE.md](./CURRENT_STATE.md) for gap analysis. - -**Status**: 🟡 45% Complete (CI Working, Pre-commit Hooks Added, Deployment Missing) -**Priority**: High (Launch blocker) -**Estimated Duration**: 1-2 weeks -**Actual Progress**: GitHub Actions CI working, Docker Compose for local dev. Remaining: Fly.io deployment, secrets management, automated migrations. - -## Overview - -Set up Fly.io deployment, CI/CD pipeline, and secrets management. Enable automated deployment from `develop` (staging) and `main` (production) branches. - -## Issues - -### Issue #31: Create Fly.io configuration - -**Priority**: P1 (High) -**Depends on**: None (can start early) -**Blocks**: #32 (CI/CD pipeline) - -#### Description -Configure Fly.io apps for staging and production environments. Create separate `fly.toml` configs with environment-specific settings. 
- -#### Acceptance Criteria -- [ ] Two Fly.io apps created: `kotadb-staging`, `kotadb-prod` -- [ ] Separate `fly.toml` configs: `fly.staging.toml`, `fly.prod.toml` -- [ ] Health check integration (`/health` endpoint) -- [ ] Resource limits configured (CPU, memory, disk) -- [ ] Auto-scaling rules (min/max instances) -- [ ] Persistent volumes for temporary workspaces (optional) -- [ ] Environment variables documented - -#### Technical Notes -- Region: Choose closest to Supabase region (likely `iad` or `lhr`) -- Image: Build from Dockerfile -- Port: 3000 (matches Bun server) -- Machine size: `shared-cpu-1x` for staging, `shared-cpu-2x` for prod - -#### Files to Create -- `fly.staging.toml` - Staging configuration -- `fly.prod.toml` - Production configuration -- `docs/deployment.md` - Deployment guide - -#### Example fly.staging.toml -```toml -app = "kotadb-staging" -primary_region = "iad" - -[build] - dockerfile = "Dockerfile" - -[env] - NODE_ENV = "staging" - PORT = "3000" - -[http_service] - internal_port = 3000 - force_https = true - auto_stop_machines = true - auto_start_machines = true - min_machines_running = 1 - processes = ["app"] - -[checks] - [checks.health] - grace_period = "10s" - interval = "30s" - method = "GET" - path = "/health" - timeout = "5s" - type = "http" - -[[vm]] - cpu_kind = "shared" - cpus = 1 - memory_mb = 1024 -``` - -#### Fly.io App Creation -```bash -# Create staging app -flyctl apps create kotadb-staging - -# Create production app -flyctl apps create kotadb-prod - -# Set secrets (see #33) -flyctl secrets set SUPABASE_URL=... --app kotadb-staging -``` - ---- - -### Issue #32: Build CI/CD pipeline - -**Priority**: P0 (Critical) -**Depends on**: #31 (Fly.io config), #3 (migrations), #34-38 (tests) -**Blocks**: Automated deployment - -#### Description -Create GitHub Actions workflow for validation, testing, migration, and deployment. 
- -#### Acceptance Criteria -- [ ] Workflow triggers on push to `feat/*`, `develop`, `main` -- [ ] Validation stage: lint, typecheck, test, build (all branches) -- [ ] Migration stage: apply migrations (develop, main only) -- [ ] Deployment stage: deploy to Fly.io (develop, main only) -- [ ] Branch routing: - - `develop` → `kotadb-staging` - - `main` → `kotadb-prod` -- [ ] Rollback on deployment failure -- [ ] Manual approval for production migrations (optional) - -#### Technical Notes -- Use `flyctl` GitHub Action for deployment -- Store Fly.io API token in GitHub Secrets -- Run migrations before deployment -- Deploy only if tests pass - -#### Files to Create -- `.github/workflows/ci.yml` - Main CI/CD workflow -- `.github/workflows/rollback.yml` - Manual rollback workflow - -#### Example Workflow -```yaml -name: CI/CD - -on: - push: - branches: - - feat/** - - develop - - main - -jobs: - validate: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: oven-sh/setup-bun@v1 - - run: bun install - - run: bun run lint - - run: bun run typecheck - - run: bun test - - run: bun run build - - migrate: - runs-on: ubuntu-latest - needs: validate - if: github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/main' - steps: - - uses: actions/checkout@v3 - - uses: oven-sh/setup-bun@v1 - - run: bun install - - name: Run migrations (staging) - if: github.ref == 'refs/heads/develop' - env: - SUPABASE_URL: ${{ secrets.SUPABASE_URL_STAGING }} - SUPABASE_SERVICE_KEY: ${{ secrets.SUPABASE_SERVICE_KEY_STAGING }} - run: bun run migrate - - - name: Run migrations (prod) - if: github.ref == 'refs/heads/main' - env: - SUPABASE_URL: ${{ secrets.SUPABASE_URL_PROD }} - SUPABASE_SERVICE_KEY: ${{ secrets.SUPABASE_SERVICE_KEY_PROD }} - run: bun run migrate - - deploy: - runs-on: ubuntu-latest - needs: migrate - if: github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/main' - steps: - - uses: actions/checkout@v3 - - uses: superfly/flyctl-actions/setup-flyctl@master - - - name: Deploy to staging - if: github.ref == 'refs/heads/develop' - env: - FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} - run: flyctl deploy --config fly.staging.toml --remote-only - - - name: Deploy to production - if: github.ref == 'refs/heads/main' - env: - FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} - run: flyctl deploy --config fly.prod.toml --remote-only - - - name: Verify deployment - run: | - if [ "${{ github.ref }}" == "refs/heads/develop" ]; then - curl -f https://kotadb-staging.fly.dev/health || exit 1 - else - curl -f https://kotadb-prod.fly.dev/health || exit 1 - fi -``` - ---- - -### Issue #33: Create secrets management scripts - -**Priority**: P2 (Medium) -**Depends on**: #31 (Fly.io apps exist) -**Blocks**: Production deployment - -#### Description -Build scripts to sync secrets from local SSOT files to Fly.io and Supabase. 
- -#### Acceptance Criteria -- [ ] `scripts/sync-secrets-staging.sh` syncs staging secrets -- [ ] `scripts/sync-secrets-prod.sh` syncs production secrets -- [ ] Scripts push to both Fly.io and Supabase (if needed) -- [ ] Documentation for secret rotation -- [ ] Template files for `.env.staging.secrets`, `.env.prod.secrets` -- [ ] Scripts validate required secrets before pushing - -#### Technical Notes -- Never commit actual secrets to git -- Store templates in `.env.sample.staging`, `.env.sample.prod` -- Use `flyctl secrets import` for bulk updates -- Validate secrets format before pushing - -#### Files to Create -- `scripts/sync-secrets-staging.sh` - Staging secret sync -- `scripts/sync-secrets-prod.sh` - Production secret sync -- `.env.sample.staging` - Staging secret template -- `.env.sample.prod` - Production secret template -- `docs/secrets.md` - Secrets management guide - -#### Example Script -```bash -#!/bin/bash -# scripts/sync-secrets-staging.sh - -set -e - -SECRETS_FILE=".env.staging.secrets" - -if [ ! -f "$SECRETS_FILE" ]; then - echo "Error: $SECRETS_FILE not found" - exit 1 -fi - -# Validate required secrets -required_secrets=( - "SUPABASE_URL" - "SUPABASE_SERVICE_KEY" - "GITHUB_APP_ID" - "GITHUB_APP_PRIVATE_KEY" - "GITHUB_WEBHOOK_SECRET" -) - -for secret in "${required_secrets[@]}"; do - if ! grep -q "^$secret=" "$SECRETS_FILE"; then - echo "Error: Missing required secret: $secret" - exit 1 - fi -done - -# Push to Fly.io -echo "Syncing secrets to Fly.io (kotadb-staging)..." -flyctl secrets import --app kotadb-staging < "$SECRETS_FILE" - -echo "Secrets synced successfully!" -``` - ---- - -## Success Criteria - -- [ ] Fly.io apps are created and configured -- [ ] CI/CD pipeline runs on all branches -- [ ] Tests gate all merges -- [ ] Migrations apply automatically before deployment -- [ ] Deployments succeed to staging and production -- [ ] Secrets are manageable via scripts -- [ ] Rollback procedure is documented and tested - -## Dependencies for Other Epics - -This epic depends on: -- Epic 1 (migrations) -- Epic 10 (tests must pass) -- All other epics (everything must work for deployment) - -This epic enables: -- Automated deployments -- Continuous delivery -- Staging environment testing diff --git a/docs/vision/manifesto.md b/docs/vision/manifesto.md deleted file mode 100644 index 09947081..00000000 --- a/docs/vision/manifesto.md +++ /dev/null @@ -1,327 +0,0 @@ -# The Future of Software is Autonomous Collaboration - -**A Manifesto for Agentic Engineering** - ---- - -## The Inflection Point - -We're living through the most significant shift in software development since the introduction of version control. For decades, we've built tools to make *humans* more productive: IDEs, linters, CI/CD pipelines, static analysis. We optimized for human speed, human memory, human cognition. - -**That era is over.** - -AI agents don't need syntax highlighting. They don't need code review checklists. They don't need Jira tickets or sprint planning meetings. They need something fundamentally different: **infrastructure for coordination, memory, and collaboration.** - -The question isn't "Can AI write code?" (it already does). The question is: **"How do we build systems where dozens of specialized AI agents coordinate to ship production software?"** - -This is the question KotaDB answers. 
- ---- - -## The Problem: Agents Don't Talk to Each Other - -Right now, if you want to use AI for software development, you have two options: - -### Option 1: Single Agent Systems -Use Claude Code, Cursor, or Copilot. One agent, one task at a time. Want to classify an issue? Ask Claude. Want to implement it? Ask Claude again. Want to review the code? Ask Claude a third time. - -**The problem**: You're the orchestrator. You're the memory. You're the state machine. The agent is a stateless function call. Every prompt starts from scratch. - -### Option 2: Custom Orchestration -Build your own system. Write Python scripts that call the Claude API. Chain agents together with subprocess calls. Persist state in JSON files. Manage git operations manually. Debug mysterious failures when agents conflict. - -**The problem**: You've just built a worse version of Kubernetes for agents. You're spending more time managing infrastructure than building features. - ---- - -## The Missing Layer: Multi-Agent Infrastructure - -What we need—what doesn't exist yet—is a **platform layer** for autonomous software development. The equivalent of Kubernetes for containers, but for AI agents. - -This platform must provide: - -### 1. **Standardized Communication** -Agents need a common language. Not REST APIs. Not GraphQL. Not bespoke JSON schemas. A **protocol** that works across LLM providers, tools, and custom agents. - -*This is what MCP (Model Context Protocol) provides.* - -### 2. **Resource Isolation** -When five agents work on the same codebase simultaneously, they need **isolated workspaces**. Not branches (too coarse). Not in-memory state (too fragile). Something that preserves git semantics while preventing conflicts. - -*This is what git worktrees provide.* - -### 3. **Persistent Memory** -Agents need to remember what they've done. Not just "I wrote a plan," but **where** the plan lives, **what** the next phase requires, **who** is responsible for executing it. - -*This is what state management provides.* - -### 4. **Security & Multi-Tenancy** -When agents access sensitive codebases or proprietary data, they need **authentication, rate limiting, and isolation**. Row-level security. Audit logs. The same guarantees we expect from production systems. - -*This is what Supabase + RLS provides.* - -### 5. **Workflow Orchestration** -Agents need to coordinate on complex, multi-phase workflows. Plan → Implement → Test → Review → Document. Each phase might use different agents. Failures need to retry. State needs to persist across phases. - -*This is what ADW (AI Developer Workflows) provides.* - ---- - -## The Vision: KotaDB as Infrastructure Layer - -**KotaDB is not a code search tool.** Code search is the *memory layer* that enables agents to understand codebases. The real product is the **orchestration infrastructure** that coordinates autonomous development workflows. - -Think of it this way: - -``` -GitHub = where humans collaborate on code -KotaDB = where agents collaborate on code - -Docker = how humans package applications -Agent Tools = how agents expose capabilities - -Kubernetes = how humans orchestrate containers -KotaDB ADW = how agents orchestrate workflows -``` - ---- - -## What This Looks Like in Practice - -Imagine this workflow: - -1. **User**: Creates GitHub issue: "Add rate limiting to /api/search endpoint" - -2. **Classifier Agent** (via KotaDB MCP): - - Calls `kotadb.search_code("rate limiting")` to find similar patterns - - Returns classification: `/feature` - -3. 
**Planner Agent** (via KotaDB MCP): - - Calls `kotadb.index_repository()` to refresh codebase context - - Calls `kotadb.search_code("middleware authentication")` to understand auth patterns - - Calls `kotadb.git_create_worktree("feat-rate-limit")` to get isolated workspace - - Writes plan to `docs/specs/feat-rate-limit.md` - - Calls `kotadb.git_commit()` to save plan - - Returns: `plan_file: "docs/specs/feat-rate-limit.md"` - -4. **Implementor Agent** (via KotaDB MCP): - - Reads plan from worktree - - Calls `kotadb.search_code("rate limit redis")` to find implementation examples - - Writes code: `app/src/middleware/rate-limit.ts` - - Calls `kotadb.git_commit()` to save implementation - -5. **Validator Agent** (via KotaDB MCP): - - Calls `kotadb.bun_validate()` to run lint, typecheck, tests - - Detects failure: "Type error in rate-limit.ts line 42" - - Returns feedback to Implementor Agent - -6. **Implementor Agent** (retry): - - Fixes type error based on feedback - - Calls `kotadb.git_commit()` to save fix - - Calls `kotadb.bun_validate()` again - - All checks pass ✅ - -7. **Reviewer Agent** (via KotaDB MCP): - - Calls `kotadb.search_code("rate limit test")` to verify test coverage - - Reads implementation from worktree - - Analyzes against plan - - Returns: `status: approved, blockers: []` - -8. **Documenter Agent** (via KotaDB MCP): - - Updates README.md with rate limiting documentation - - Calls `kotadb.git_commit()` to save docs - - Calls `kotadb.git_push_branch("feat-rate-limit")` to publish - -9. **PR Creator Agent** (via GitHub CLI): - - Creates pull request with summary - - Links to original issue - - Tags for human review - -**Total time**: 4 minutes. **Human intervention**: Zero (until PR review). - -This workflow is **impossible** with today's tools. You'd need custom glue code, manual state management, and brittle subprocess orchestration. **KotaDB makes it trivial.** - ---- - -## The Principles - -### 1. **Determinism + Creativity** -Agents bring creativity (LLM reasoning). Infrastructure brings determinism (predictable execution, reliable state, consistent APIs). Together, they produce **reliable autonomous systems**. - -### 2. **Composability Over Monoliths** -Don't build one super-agent that does everything. Build **specialized agents** (classifier, planner, implementor, reviewer) and compose them via workflows. Unix philosophy for AI. - -### 3. **Standards Over Silos** -Use MCP for communication. Use git for version control. Use standard databases for persistence. Don't invent new protocols. **Standardize on battle-tested infrastructure.** - -### 4. **Production-Grade, Not Prototypes** -Real authentication. Real rate limiting. Real error handling. Real logging. Real tests. If you wouldn't deploy it to production for human users, don't deploy it for agents. - -### 5. **Multi-Vendor by Default** -No lock-in. Claude for planning. OpenAI for implementation. Custom agents for security. Google for documentation. KotaDB coordinates them all. **Switzerland, not walled garden.** - ---- - -## The Moat - -Why is this hard? Why hasn't someone else built this? - -### 1. **Security is Hard** -Most MCP servers have no authentication (research found ~2,000 exposed servers). KotaDB has tier-based auth, rate limiting, and row-level security from day one. - -### 2. **Multi-Tenancy is Hard** -Isolating agents from each other requires deep understanding of databases, git semantics, and state management. Supabase RLS + worktrees + persistent state is a non-trivial combination. - -### 3. 
**Production is Hard** -Running one agent on a demo repo is easy. Running 65+ autonomous workflows on a real codebase with tests, CI/CD, and human collaboration is hard. KotaDB has done this. - -### 4. **Workflows are Hard** -Coordinating multi-phase SDLC workflows with retry logic, state persistence, and error handling requires deep software engineering expertise. Most AI companies don't have this DNA. - -**KotaDB has all four.** That's the moat. - ---- - -## The Market - -### Who Needs This? - -**Agentic Engineering Early Adopters** (today): -- Using Claude Code, Cursor, Copilot daily -- Frustrated by single-agent limitations -- Building custom orchestration scripts -- **Need**: Platform to coordinate multiple agents - -**Platform Engineering Teams** (6-12 months): -- Building internal developer platforms -- Standardizing on AI tooling -- Seeking self-hosted solutions -- **Need**: Infrastructure layer for agent workflows - -**AI-Native Startups** (12-24 months): -- Entire codebases managed by agents -- Minimal human engineering teams -- High tolerance for cutting-edge tech -- **Need**: Production-grade orchestration at scale - -### Market Size - -**TAM (Total Addressable Market)**: -- 31M software developers worldwide -- Average $100k/year salary -- **$3.1 trillion in developer productivity** - -**SAM (Serviceable Addressable Market)**: -- 10% early adopters (3.1M developers) -- $50/month average (solo + team tiers) -- **$1.86 billion annual** - -**SOM (Serviceable Obtainable Market)**: -- 0.1% market share (3,100 customers) -- $75/month average revenue per user -- **$2.79 million ARR** - -This is achievable within 18 months given: -- First-mover advantage -- Proven production usage -- Open-source community -- Strategic partnerships (Anthropic, Cursor, GitHub) - ---- - -## The Competition - -### What They're Building vs. What We're Building - -**LangChain, AutoGPT, CrewAI**: -- General-purpose agent frameworks -- Focus on RAG, chatbots, research agents -- Weak on software development workflows -- **We win**: Specialized for software development, production-grade - -**GitHub Copilot Workspace, Cursor**: -- Single-agent IDEs -- Monolithic architectures -- Closed ecosystems -- **We win**: Multi-agent, composable, open standards (MCP) - -**Replit Agent, Vercel v0**: -- End-to-end code generation -- Focus on greenfield projects -- Limited collaboration primitives -- **We win**: Brownfield support, SDLC workflows, agent coordination - -**CI/CD Platforms (GitHub Actions, CircleCI)**: -- YAML-driven automation -- No AI-native workflows -- Sequential execution -- **We win**: Autonomous decision-making, concurrent agents, self-correction - -**No one is building multi-agent infrastructure for software development.** This category doesn't exist yet. We're defining it. 
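
> *Editor's note: as a sanity check on the figures quoted in "The Market" section above, the sizing arithmetic written out (all inputs exactly as quoted there):*

$$
\begin{aligned}
\text{TAM} &= 31\text{M developers} \times \$100\text{k/yr} = \$3.1\text{T} \\
\text{SAM} &= (10\% \times 31\text{M}) \times \$50/\text{mo} \times 12 = 3.1\text{M} \times \$600/\text{yr} \approx \$1.86\text{B/yr} \\
\text{SOM} &= 3{,}100 \text{ customers} \times \$75/\text{mo} \times 12 = \$2.79\text{M ARR}
\end{aligned}
$$
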
- ---- - -## The Timeline - -### Phase 1: Framework Core (Months 1-2) -**Goal**: Expose ADW capabilities via MCP - -- Build ADW MCP server (workflow orchestration tools) -- Create agent registry (catalog of available agents) -- Update documentation (framework-centric messaging) -- **Milestone**: 10 external agents registered, 100 workflows executed - -### Phase 2: Developer Experience (Months 3-4) -**Goal**: Make it trivial to build custom agents - -- Launch KotaDB CLI (agent management, workflow execution) -- Publish agent templates (Python, TypeScript, Rust) -- Build collaboration primitives (agent-to-agent messaging, resource locking) -- **Milestone**: 100 agents published, 1,000 workflows/month - -### Phase 3: Enterprise Platform (Months 5-6) -**Goal**: Production-ready for enterprise customers - -- Self-hosted deployment (Docker Compose, air-gapped) -- Agent marketplace (public registry, usage analytics) -- Compliance tooling (audit logs, SOC2 certification) -- **Milestone**: 10 enterprise customers, $100k MRR - ---- - -## The Call to Action - -**To Developers**: Stop building single-agent toys. Build multi-agent systems. KotaDB gives you the infrastructure. - -**To Companies**: Stop hiring more engineers to write boilerplate. Hire agents. KotaDB coordinates them. - -**To Investors**: This is the infrastructure layer for the next generation of software development. GitHub was $7.5B. Kubernetes changed the world. **KotaDB is the platform for autonomous development.** - ---- - -## The Future We're Building - -Five years from now, we'll look back at 2025 as the year software development fundamentally changed. The year we stopped *writing* code and started *orchestrating* agents to write it for us. - -The companies that win won't be the ones with the best LLMs. They'll be the ones with the best **infrastructure for agent collaboration**. The ones who figured out how to coordinate dozens of specialized agents to ship production software at scale. - -**That company is KotaDB.** - -We're not building a code search tool. We're building the operating system for autonomous software development. We're building the platform where the next million software projects will be built—not by humans, but by fleets of coordinated AI agents. - -The future of software is autonomous collaboration. 
- -**The future is KotaDB.** - ---- - -*"The best way to predict the future is to build it."* -— Alan Kay - ---- - -**Join us**: [kotadb.dev](https://kotadb.dev) (coming soon) -**Contribute**: [github.com/jayminwest/kota-db-ts](https://github.com/jayminwest/kota-db-ts) -**Discuss**: Discord (coming soon) - -*Written October 13, 2025 by the KotaDB team* diff --git a/web/.env.sample b/web/.env.sample deleted file mode 100644 index 7664a377..00000000 --- a/web/.env.sample +++ /dev/null @@ -1,32 +0,0 @@ -# KotaDB Web Application Environment Variables - -# Backend API URL -# Local: http://localhost:3000 -# Production: https://kotadb.fly.dev -NEXT_PUBLIC_API_URL=http://localhost:3000 - -# Supabase Configuration -# Get these values from your Supabase project settings at https://supabase.com/dashboard -# Local: http://localhost:54326 (Supabase Local Kong gateway) -# Production: https://.supabase.co -NEXT_PUBLIC_SUPABASE_URL=http://localhost:54326 -# Anon key is safe to expose in client-side code (RLS policies enforce data isolation) -NEXT_PUBLIC_SUPABASE_ANON_KEY=your-supabase-anon-key -# Service role key (server-side only, NEVER expose to client) -# Required for dev-session endpoint to use Admin API -# Get from Supabase dashboard > Project Settings > API > service_role key -SUPABASE_SERVICE_ROLE_KEY=your-supabase-service-role-key - -# Stripe Configuration (for checkout and billing) -# Get these values from your Stripe dashboard -NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_your_stripe_publishable_key - -# API Key for server-side API calls (optional, can be set via UI) -# Format: kota___ -API_KEY= - -# Dev Session Endpoint (optional, for testing only) -# WARNING: The /auth/dev-session endpoint is ONLY available in non-production environments -# It is automatically disabled when both NODE_ENV=production and VERCEL_ENV=production -# This endpoint allows Playwright agents to generate authenticated sessions without OAuth -# ENABLE_DEV_SESSION=true diff --git a/web/.eslintrc.json b/web/.eslintrc.json deleted file mode 100644 index bffb357a..00000000 --- a/web/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "next/core-web-vitals" -} diff --git a/web/.gitignore b/web/.gitignore deleted file mode 100644 index b8aeebed..00000000 --- a/web/.gitignore +++ /dev/null @@ -1,29 +0,0 @@ -# Dependencies -node_modules - -# Next.js -.next -out -*.tsbuildinfo -next-env.d.ts - -# Environment variables -.env -.env.local -.env.production - -# Debug logs -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -# Editor -.DS_Store -*.sw? 
- -# Testing -/test-results/ -/playwright-report/ -/blob-report/ -/playwright/.cache/ -.vercel diff --git a/web/Dockerfile b/web/Dockerfile deleted file mode 100644 index 741b0442..00000000 --- a/web/Dockerfile +++ /dev/null @@ -1,40 +0,0 @@ -# syntax=docker/dockerfile:1 - -FROM oven/bun:1 AS base -WORKDIR /app - -# Development stage -FROM base AS development -COPY package.json bun.lockb ./ -COPY web/package.json ./web/ -COPY shared ./shared/ -RUN cd web && bun install --frozen-lockfile -WORKDIR /app/web -CMD ["bun", "run", "dev"] - -# Dependencies stage -FROM base AS deps -COPY package.json bun.lockb ./ -COPY web/package.json ./web/ -COPY shared ./shared/ -RUN cd web && bun install --frozen-lockfile --production - -# Build stage -FROM base AS builder -COPY package.json bun.lockb ./ -COPY web ./web/ -COPY shared ./shared/ -RUN cd web && bun install --frozen-lockfile -RUN cd web && bun run build - -# Production stage -FROM base AS production -ENV NODE_ENV=production -COPY --from=deps /app/web/node_modules ./web/node_modules -COPY --from=builder /app/web/.next ./web/.next -COPY --from=builder /app/web/public ./web/public -COPY --from=builder /app/web/package.json ./web/ -COPY --from=builder /app/shared ./shared/ -WORKDIR /app/web -EXPOSE 3001 -CMD ["bun", "run", "start"] diff --git a/web/README.md b/web/README.md deleted file mode 100644 index 95a2c64a..00000000 --- a/web/README.md +++ /dev/null @@ -1,202 +0,0 @@ -# KotaDB Web Application - -Next.js web interface for KotaDB MCP-first onboarding. - -## Product Philosophy - -KotaDB makes AI agents more effective by providing code intelligence through MCP (Model Context Protocol). The web frontend supports the onboarding flow: sign up → generate API key → copy config → better agents. - -## Features - -- **GitHub OAuth Authentication**: Secure sign-up and login via GitHub -- **API Key Management**: Generate, reset, and revoke API keys for MCP access -- **MCP Configuration**: Copy-paste configuration for Claude Code CLI integration -- **Stripe Integration**: Upgrade from free to solo/team tiers -- **Rate Limiting**: Visual rate limit quota tracking with countdown timer -- **Type-Safe API Client**: Shared TypeScript types with backend for compile-time safety - -## User Journey - -1. Sign up via GitHub OAuth (`/login`) -2. Generate API key (`/dashboard`) -3. Copy MCP configuration (`/mcp`) -4. Paste config into Claude Code CLI -5. AI agents can now search code, analyze dependencies, and more - -## Archived Pages - -The following pages have been archived to `web/app/_archive/` to reduce maintenance burden and clarify product focus: - -- `/search` - Full-text search interface (duplicates `mcp__kotadb__search-code` tool) -- `/repository-index` - Repository indexing UI (duplicates `mcp__kotadb__index-repository` tool) -- `/files` - Recent files browser (duplicates `mcp__kotadb__list-recent-files` tool) - -These pages duplicate MCP tool functionality and are not part of the core onboarding flow. Users interact with KotaDB via AI agents, not web forms. - -## Getting Started - -### Prerequisites - -- Bun 1.2.9 or later -- KotaDB API running on `http://localhost:3000` (default) -- Valid API key (format: `kota___`) - -### Development - -```bash -# Install dependencies (from repository root) -bun install - -# Start development server -cd web && bun run dev -``` - -The web app will be available at `http://localhost:3001`. 
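
> *Editor's note: the rate-limit quota tracking mentioned under Features works by reading rate-limit headers off API responses. A minimal sketch, assuming conventional `X-RateLimit-*` header names; these names are an assumption for illustration, not the confirmed KotaDB header contract:*

```typescript
// Hedged sketch: derive quota state from rate-limit response headers.
// The X-RateLimit-* names below are assumptions; check the actual
// headers emitted by the KotaDB API you are running against.
interface RateLimitInfo {
  limit: number // requests allowed per window
  remaining: number // requests left in the current window
  resetAt: Date // when the current window resets
}

function parseRateLimitHeaders(headers: Headers): RateLimitInfo | null {
  const limit = headers.get('X-RateLimit-Limit')
  const remaining = headers.get('X-RateLimit-Remaining')
  const reset = headers.get('X-RateLimit-Reset')
  if (!limit || !remaining || !reset) return null
  return {
    limit: Number(limit),
    remaining: Number(remaining),
    resetAt: new Date(Number(reset) * 1000), // assumes a unix-seconds timestamp
  }
}
```
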
- -### Environment Variables - -Create a `.env.local` file in the `web/` directory: - -```env -NEXT_PUBLIC_API_URL=http://localhost:3000 -``` - -See `.env.sample` for full configuration options. - -### Production Build - -```bash -cd web && bun run build -cd web && bun run start -``` - -### Docker - -```bash -# Start web service with Docker Compose -docker compose up web - -# Build web service container -docker compose build web -``` - -## Architecture - -### Shared Types - -The web app consumes backend types from `../shared/types/`: - -```typescript -import type { SearchRequest, SearchResponse } from '@shared/types/api' -import type { AuthContext, Tier } from '@shared/types/auth' -``` - -TypeScript path alias `@shared/*` points to `../shared/*` (configured in `tsconfig.json`). - -### API Client - -Type-safe fetch wrappers in `lib/api-client.ts`: - -```typescript -import { apiClient } from '@/lib/api-client' - -const { response, headers } = await apiClient.search({ term: 'function' }, apiKey) -``` - -All API methods return rate limit headers for quota tracking. - -### Authentication - -API keys stored in `localStorage` and passed via `Authorization: Bearer` header. -Managed by `AuthContext` provider in `context/AuthContext.tsx`. - -## Project Structure - -``` -web/ -├── app/ # Next.js 14 App Router -│ ├── _archive/ # Archived pages (ignored by Next.js routing) -│ │ ├── components/ # Components only used by archived pages -│ │ │ ├── SearchBar.tsx -│ │ │ └── FileList.tsx -│ │ ├── search/page.tsx # Archived search interface -│ │ ├── repository-index/page.tsx # Archived indexing UI -│ │ └── files/page.tsx # Archived files browser -│ ├── auth/ # Authentication routes -│ │ └── dev-session/route.ts # Dev-mode session endpoint -│ ├── layout.tsx # Root layout with navigation -│ ├── page.tsx # Landing page -│ ├── login/page.tsx # GitHub OAuth authentication -│ ├── dashboard/page.tsx # API key management + billing -│ ├── pricing/page.tsx # Stripe checkout -│ └── mcp/page.tsx # MCP configuration copy-paste -├── components/ # Reusable React components -│ ├── Navigation.tsx # Top navigation bar -│ ├── ApiKeyInput.tsx # API key management -│ ├── RateLimitStatus.tsx # Rate limit indicator -│ ├── KeyResetModal.tsx # API key reset confirmation -│ ├── KeyRevokeModal.tsx # API key revoke confirmation -│ └── mcp/ # MCP page components -│ ├── ConfigurationDisplay.tsx -│ ├── CopyButton.tsx -│ └── ToolReference.tsx -├── context/ # React context providers -│ └── AuthContext.tsx # Authentication state -├── lib/ # Utility libraries -│ ├── api-client.ts # Type-safe API client -│ └── playwright-helpers.ts # Test session management -└── public/ # Static assets -``` - -## Testing - -### Type Checking - -```bash -cd web && bunx tsc --noEmit -``` - -### Linting - -```bash -cd web && bun run lint -``` - -### Build Validation - -```bash -cd web && bun run build -``` - -## Deployment - -### Vercel - -When deploying to Vercel, configure the following environment variables in Project Settings → Environment Variables: - -**Required Variables:** -- `NEXT_PUBLIC_SUPABASE_URL`: Production Supabase project URL (from Supabase dashboard → Settings → API) -- `NEXT_PUBLIC_SUPABASE_ANON_KEY`: Production Supabase anon key (from Supabase dashboard → Settings → API) -- `NEXT_PUBLIC_API_URL`: Production KotaDB API URL (e.g., `https://api.kotadb.com`) - -**Scope:** Apply to Production, Preview, and Development environments - -**Security Notes:** -- Never commit credentials to git repository -- Use `.env.local` for local development (excluded 
by `.gitignore`) -- Production credentials should only exist in Vercel dashboard - -**Build Configuration:** -- Build Command: `cd web && bun run build` -- Output Directory: `web/.next` -- Install Command: `bun install` - -See `docs/deployment.md` for backend API deployment instructions. - -## Contributing - -Follow KotaDB contribution guidelines. Ensure all changes pass: - -- Type checking: `bunx tsc --noEmit` -- Linting: `bun run lint` -- Production build: `bun run build` diff --git a/web/app/_archive/components/FileList.tsx b/web/app/_archive/components/FileList.tsx deleted file mode 100644 index b2bc95ab..00000000 --- a/web/app/_archive/components/FileList.tsx +++ /dev/null @@ -1,73 +0,0 @@ -'use client' - -import type { SearchResult } from '@shared/types/api' - -interface FileListProps { - files: SearchResult[] - emptyMessage?: string -} - -export default function FileList({ files, emptyMessage = 'No files found' }: FileListProps) { - if (files.length === 0) { - return ( -
-      <div>
-        {/* NOTE: markup in this component reconstructed; the original JSX tags and class names were lost in extraction */}
-        {emptyMessage}
-      </div>
-    )
-  }
-
-  return (
-    <div>
-      {files.map((file, index) => (
-        <div key={index}>
-          <div>{file.path}</div>
-          <div>
-            <span>Repository: {file.projectRoot}</span>
-            {file.indexedAt && (
-              <span>Indexed: {new Date(file.indexedAt).toLocaleDateString()}</span>
-            )}
-          </div>
-
-          {file.snippet && (
-            <pre>
-              <code>{file.snippet}</code>
-            </pre>
-          )}
-
-          {file.dependencies && file.dependencies.length > 0 && (
-            <div>
-              <span>Dependencies:</span>
-              {file.dependencies.map((dep, depIndex) => (
-                <span key={depIndex}>{dep}</span>
-              ))}
-            </div>
-          )}
-        </div>
-      ))}
-    </div>
- ) -} diff --git a/web/app/_archive/components/SearchBar.tsx b/web/app/_archive/components/SearchBar.tsx deleted file mode 100644 index c77de90f..00000000 --- a/web/app/_archive/components/SearchBar.tsx +++ /dev/null @@ -1,54 +0,0 @@ -'use client' - -import { useState, FormEvent } from 'react' - -interface SearchBarProps { - onSearch: (term: string) => void - isLoading?: boolean - placeholder?: string -} - -export default function SearchBar({ - onSearch, - isLoading = false, - placeholder = 'Search code...', -}: SearchBarProps) { - const [term, setTerm] = useState('') - - const handleSubmit = (e: FormEvent) => { - e.preventDefault() - if (term.trim()) { - onSearch(term.trim()) - } - } - - return ( -
-    <form onSubmit={handleSubmit}>
-      {/* NOTE: markup reconstructed; some original tags and class names were lost in extraction */}
-      <div>
-        <input
-          type="text"
-          value={term}
-          onChange={(e) => setTerm(e.target.value)}
-          placeholder={placeholder}
-          className="flex-1 px-4 py-3 rounded-lg text-base glass-light dark:glass-dark text-gray-900 dark:text-gray-100 placeholder:text-gray-600 dark:placeholder:text-gray-400 focus:outline-none focus:ring-2 focus:ring-blue-500 transition-all"
-          disabled={isLoading}
-        />
-        <button type="submit" disabled={isLoading}>
-          {isLoading ? 'Searching...' : 'Search'}
-        </button>
-      </div>
-      {term.trim().length > 0 && term.trim().length < 3 && (
-        <span>Search term should be at least 3 characters</span>
-      )}
-    </form>
- ) -} diff --git a/web/app/_archive/files/page.tsx b/web/app/_archive/files/page.tsx deleted file mode 100644 index e710e6c8..00000000 --- a/web/app/_archive/files/page.tsx +++ /dev/null @@ -1,129 +0,0 @@ -'use client' - -import { useState, useEffect } from 'react' -import { apiClient, ApiError } from '@/lib/api-client' -import { useAuth } from '@/context/AuthContext' -import FileList from '../components/FileList' -import type { SearchResult } from '@shared/types/api' - -export default function FilesPage() { - const { apiKey, updateRateLimitInfo, isAuthenticated } = useAuth() - const [files, setFiles] = useState([]) - const [isLoading, setIsLoading] = useState(false) - const [error, setError] = useState(null) - const [limit, setLimit] = useState(20) - - const loadFiles = async () => { - if (!isAuthenticated) { - setError('Please set an API key to view files') - return - } - - setIsLoading(true) - setError(null) - - try { - const { response, headers } = await apiClient.recentFiles(limit, apiKey!) - setFiles(response.results) - updateRateLimitInfo(headers) - } catch (err) { - if (err instanceof ApiError) { - if (err.status === 401) { - setError('Invalid API key. Please check your credentials.') - } else if (err.status === 429) { - setError('Rate limit exceeded. Please wait before trying again.') - } else { - setError(`Failed to load files: ${err.message}`) - } - } else { - setError('An unexpected error occurred') - } - setFiles([]) - } finally { - setIsLoading(false) - } - } - - useEffect(() => { - if (isAuthenticated) { - loadFiles() - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [isAuthenticated]) - - const handleRefresh = () => { - loadFiles() - } - - const handleLimitChange = (newLimit: number) => { - setLimit(newLimit) - // Reload with new limit after state update - setTimeout(() => loadFiles(), 0) - } - - return ( -
-    <div>
-      {/* NOTE: markup reconstructed; the original JSX tags and class names were lost in extraction */}
-      <div>
-        <h1>Recent Files</h1>
-        <p>View recently indexed files across all repositories</p>
-      </div>
-
-      {!isAuthenticated && (
-        <div>Please set your API key in the navigation bar to view files.</div>
-      )}
-
-      {isAuthenticated && (
-        <div>
-          <label>
-            Limit:
-            <select value={limit} onChange={(e) => handleLimitChange(Number(e.target.value))}>
-              <option value={10}>10</option>
-              <option value={20}>20</option>
-              <option value={50}>50</option>
-            </select>
-          </label>
-          <button onClick={handleRefresh} disabled={isLoading}>Refresh</button>
-        </div>
-      )}
-
-      {error && <div>{error}</div>}
-
-      {isLoading && <div>Loading files...</div>}
-
-      {!isLoading && isAuthenticated && (
-        <FileList files={files} />
-      )}
-    </div>
- ) -} diff --git a/web/app/_archive/repository-index/page.tsx b/web/app/_archive/repository-index/page.tsx deleted file mode 100644 index 2c9eab8c..00000000 --- a/web/app/_archive/repository-index/page.tsx +++ /dev/null @@ -1,299 +0,0 @@ -'use client' - -import { useState, useEffect, useRef, FormEvent } from 'react' -import { apiClient, ApiError } from '@/lib/api-client' -import { useAuth } from '@/context/AuthContext' -import type { JobStatusResponse } from '@shared/types/api' - -export default function IndexPage() { - const { apiKey, updateRateLimitInfo, isAuthenticated } = useAuth() - const [repository, setRepository] = useState('') - const [ref, setRef] = useState('') - const [isLoading, setIsLoading] = useState(false) - const [error, setError] = useState(null) - const [success, setSuccess] = useState(null) - const [jobDetails, setJobDetails] = useState(null) - const [pollingActive, setPollingActive] = useState(false) - const pollingIntervalRef = useRef(null) - const [pollingDelay, setPollingDelay] = useState(3000) // Start at 3s - - const pollJobStatus = async (jobId: string) => { - try { - const { response, headers } = await apiClient.getJobStatus(jobId, apiKey!) - updateRateLimitInfo(headers) - setJobDetails(response) - - // Stop polling if terminal state reached - if (['completed', 'failed', 'skipped'].includes(response.status)) { - setPollingActive(false) - setPollingDelay(3000) // Reset for next job - if (pollingIntervalRef.current) { - clearTimeout(pollingIntervalRef.current as unknown as NodeJS.Timeout) - pollingIntervalRef.current = null - } - } - } catch (err) { - // Don't crash UI on polling errors - just log them - if (err instanceof Error) { - process.stderr.write(`Polling error: ${err.message}\n`) - } - } - } - - useEffect(() => { - if (pollingActive && jobDetails?.id) { - const timeout = setTimeout(() => { - pollJobStatus(jobDetails.id) - - // Exponential backoff: multiply by 1.5, cap at 30s - setPollingDelay((prevDelay) => Math.min(prevDelay * 1.5, 30000)) - }, pollingDelay) - - pollingIntervalRef.current = timeout as unknown as NodeJS.Timeout - - return () => { - clearTimeout(timeout) - } - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [pollingActive, jobDetails?.id, pollingDelay]) - - const handleSubmit = async (e: FormEvent) => { - e.preventDefault() - - if (!isAuthenticated) { - setError('Please set an API key to index repositories') - return - } - - if (!repository.trim()) { - setError('Repository is required') - return - } - - setIsLoading(true) - setError(null) - setSuccess(null) - - try { - const { response, headers } = await apiClient.index( - { - repository: repository.trim(), - ref: ref.trim() || undefined, - }, - apiKey!, - ) - - updateRateLimitInfo(headers) - setSuccess(`Indexing job started successfully! Job ID: ${response.jobId}`) - setRepository('') - setRef('') - - // Start polling for job status - setJobDetails({ - id: response.jobId, - repository_id: '', - status: response.status as JobStatusResponse['status'], - }) - setPollingDelay(3000) // Reset delay for new job - setPollingActive(true) - } catch (err) { - if (err instanceof ApiError) { - if (err.status === 401) { - setError('Invalid API key. Please check your credentials.') - } else if (err.status === 429) { - setError('Rate limit exceeded. 
Please wait before trying again.') - } else { - setError(`Indexing failed: ${err.message}`) - } - } else { - setError('An unexpected error occurred') - } - } finally { - setIsLoading(false) - } - } - - function getStatusColorClass(status: string): string { - switch (status) { - case 'pending': - return 'bg-gray-200 text-gray-800 dark:bg-gray-700 dark:text-gray-200' - case 'processing': - return 'bg-yellow-200 text-yellow-800 dark:bg-yellow-700 dark:text-yellow-200' - case 'completed': - return 'bg-green-200 text-green-800 dark:bg-green-700 dark:text-green-200' - case 'failed': - return 'bg-red-200 text-red-800 dark:bg-red-700 dark:text-red-200' - case 'skipped': - return 'bg-gray-200 text-gray-800 dark:bg-gray-700 dark:text-gray-200' - default: - return 'bg-gray-200 text-gray-800 dark:bg-gray-700 dark:text-gray-200' - } - } - - function formatElapsedTime(startedAt: string): string { - const elapsed = Date.now() - new Date(startedAt).getTime() - const seconds = Math.floor(elapsed / 1000) - const minutes = Math.floor(seconds / 60) - if (minutes > 0) { - return `${minutes}m ${seconds % 60}s` - } - return `${seconds}s` - } - - return ( -
-    <div>
-      {/* NOTE: markup reconstructed; the original JSX tags and class names were lost in extraction */}
-      <div>
-        <h1>Index Repository</h1>
-        <p>Index a GitHub repository to make it searchable</p>
-      </div>
-
-      {!isAuthenticated && (
-        <div>Please set your API key in the navigation bar to index repositories.</div>
-      )}
-
-      <form onSubmit={handleSubmit}>
-        <div>
-          <label htmlFor="repository">Repository</label>
-          <input
-            id="repository"
-            type="text"
-            value={repository}
-            onChange={(e) => setRepository(e.target.value)}
-            placeholder="owner/repo (e.g., facebook/react)"
-            className="w-full px-4 py-3 border border-gray-300 dark:border-gray-700 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-blue-500"
-            disabled={isLoading}
-          />
-          <p>Format: owner/repository (e.g., facebook/react, vercel/next.js)</p>
-        </div>
-
-        <div>
-          <label htmlFor="ref">Ref (optional)</label>
-          <input
-            id="ref"
-            type="text"
-            value={ref}
-            onChange={(e) => setRef(e.target.value)}
-            placeholder="main (defaults to repository default branch)"
-            className="w-full px-4 py-3 border border-gray-300 dark:border-gray-700 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-blue-500"
-            disabled={isLoading}
-          />
-          <p>Leave empty to use the default branch (main/master)</p>
-        </div>
-
-        <button type="submit" disabled={isLoading}>
-          {isLoading ? 'Starting...' : 'Start Indexing'}
-        </button>
-      </form>
-
-      {error && <div>{error}</div>}
-
-      {success && (
-        <div>
-          <p>{success}</p>
-          <p>
-            The repository is being indexed in the background. You can search for files once indexing completes.
-          </p>
-        </div>
-      )}
-
-      {jobDetails && (
-        <div>
-          <div>
-            <span>Indexing Progress</span>
-            <span className={getStatusColorClass(jobDetails.status)}>
-              {jobDetails.status}
-            </span>
-          </div>
-
-          {jobDetails.stats?.files_indexed !== undefined && (
-            <p>Files indexed: {jobDetails.stats.files_indexed}</p>
-          )}
-
-          {jobDetails.started_at && (
-            <p>Elapsed: {formatElapsedTime(jobDetails.started_at)}</p>
-          )}
-
-          {jobDetails.status === 'completed' && (
-            <p>
-              ✓ Indexing completed successfully! You can now search this repository.
-            </p>
-          )}
-
-          {jobDetails.status === 'failed' && jobDetails.error_message && (
-            <p>✗ Error: {jobDetails.error_message}</p>
-          )}
-
-          {pollingActive && <p>Updating status...</p>}
-        </div>
-      )}
-
-      <div>
-        <h2>Indexing Notes</h2>
-        <ul>
-          <li>Public repositories are cloned from GitHub automatically</li>
-          <li>Only TypeScript, JavaScript, and JSON files are indexed</li>
-          <li>Indexing runs asynchronously and may take several minutes</li>
-          <li>Dependencies and imports are extracted for code intelligence</li>
-        </ul>
-      </div>
-    </div>
- ) -} diff --git a/web/app/_archive/search/page.tsx b/web/app/_archive/search/page.tsx deleted file mode 100644 index 7ec81437..00000000 --- a/web/app/_archive/search/page.tsx +++ /dev/null @@ -1,94 +0,0 @@ -'use client' - -import { useState } from 'react' -import { apiClient, ApiError } from '@/lib/api-client' -import { useAuth } from '@/context/AuthContext' -import SearchBar from '../components/SearchBar' -import FileList from '../components/FileList' -import type { SearchResult } from '@shared/types/api' - -export default function SearchPage() { - const { apiKey, updateRateLimitInfo, isAuthenticated } = useAuth() - const [results, setResults] = useState([]) - const [isLoading, setIsLoading] = useState(false) - const [error, setError] = useState(null) - const [hasSearched, setHasSearched] = useState(false) - - const handleSearch = async (term: string) => { - if (!isAuthenticated) { - setError('Please set an API key to search') - return - } - - setIsLoading(true) - setError(null) - setHasSearched(true) - - try { - const { response, headers } = await apiClient.search({ term }, apiKey!) - setResults(response.results) - updateRateLimitInfo(headers) - } catch (err) { - if (err instanceof ApiError) { - if (err.status === 401) { - setError('Invalid API key. Please check your credentials.') - } else if (err.status === 429) { - setError('Rate limit exceeded. Please wait before trying again.') - } else { - setError(`Search failed: ${err.message}`) - } - } else { - setError('An unexpected error occurred') - } - setResults([]) - } finally { - setIsLoading(false) - } - } - - return ( -
-    <div>
-      {/* NOTE: markup reconstructed; the original JSX tags and class names were lost in extraction */}
-      <div>
-        <h1>Search Code</h1>
-        <p>Search across indexed repositories for code, functions, and patterns</p>
-      </div>
-
-      {!isAuthenticated && (
-        <div>Please set your API key in the navigation bar to search code.</div>
-      )}
-
-      <SearchBar onSearch={handleSearch} isLoading={isLoading} />
-
-      {error && <div>{error}</div>}
-
-      {isLoading && <div>Searching...</div>}
-
-      {!isLoading && hasSearched && (
-        <div>
-          <p>
-            {results.length} {results.length === 1 ? 'result' : 'results'} found
-          </p>
-          <FileList files={results} />
-        </div>
-      )}
-    </div>
- ) -} diff --git a/web/app/auth/callback/route.ts b/web/app/auth/callback/route.ts deleted file mode 100644 index d6ea67e1..00000000 --- a/web/app/auth/callback/route.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { createClient } from '@/lib/supabase-server' -import { NextResponse } from 'next/server' - -export async function GET(request: Request) { - const requestUrl = new URL(request.url) - const code = requestUrl.searchParams.get('code') - const origin = requestUrl.origin - - if (code) { - const supabase = createClient() - const { error } = await supabase.auth.exchangeCodeForSession(code) - - if (error) { - process.stderr.write(`[OAuth] Failed to exchange code for session: ${error.message}\n`) - return NextResponse.redirect(`${origin}/login?error=auth_failed`) - } - } - - // Simple redirect - user will generate API key manually from dashboard - return NextResponse.redirect(`${origin}/dashboard`) -} diff --git a/web/app/auth/dev-session/route.ts b/web/app/auth/dev-session/route.ts deleted file mode 100644 index d64dc952..00000000 --- a/web/app/auth/dev-session/route.ts +++ /dev/null @@ -1,254 +0,0 @@ -import { NextRequest, NextResponse } from 'next/server' -import { createClient } from '@supabase/supabase-js' -import { z } from 'zod' - -/** - * Dev-Mode Session Endpoint for Agent Authentication Bypass - * - * SECURITY: This endpoint is ONLY available in non-production environments. - * It will return 403 Forbidden if both NODE_ENV and VERCEL_ENV are set to 'production'. - * - * Purpose: Generate authenticated Supabase sessions on-demand for Playwright agents - * and automated workflows that cannot complete GitHub OAuth in headless environments. - * - * Request Schema: - * POST /auth/dev-session - * { - * "email": "test@example.com", // Required: email for test user - * "tier": "free" // Optional: subscription tier (default: "free") - * } - * - * Response Schema: - * { - * "userId": "uuid", - * "email": "test@example.com", - * "session": { - * "access_token": "eyJhbGci...", - * "refresh_token": "refresh-token", - * "expires_in": 3600, - * "expires_at": 1234567890 - * }, - * "apiKey": "kota_free_...", // Optional: may be undefined if generation fails - * "message": "Session created successfully" - * } - * - * Cookie Format (for Playwright injection): - * Cookie name: sb-{project-ref}-auth-token - * - Supabase Local: sb-localhost-auth-token - * - Production: sb-abcdefghijklmnop-auth-token - * - * Example Usage: - * ```bash - * # Create dev session - * curl -X POST http://localhost:3001/auth/dev-session \ - * -H "Content-Type: application/json" \ - * -d '{"email":"test@local.dev","tier":"free"}' - * - * # Health check - * curl http://localhost:3001/auth/dev-session - * ``` - */ - -// Request validation schema -const DevSessionRequestSchema = z.object({ - email: z.string().email('Invalid email format'), - tier: z.enum(['free', 'solo', 'team']).default('free') -}) - -// Response type definition -interface DevSessionResponse { - userId: string - email: string - session: { - access_token: string - refresh_token: string - expires_in: number // seconds - expires_at: number // unix timestamp - } - apiKey?: string // Optional (may fail to generate) - message: string -} - -/** - * Check if running in production environment - * Requires BOTH NODE_ENV and VERCEL_ENV to be 'production' for maximum safety - */ -function isProductionEnvironment(): boolean { - return ( - process.env.NODE_ENV === 'production' && - process.env.VERCEL_ENV === 'production' - ) -} - -/** - * GET /auth/dev-session - * Health check 
endpoint showing availability status - */ -export async function GET() { - const isProd = isProductionEnvironment() - - return NextResponse.json({ - available: !isProd, - environment: process.env.NODE_ENV || 'development', - vercelEnv: process.env.VERCEL_ENV || 'not-set', - message: isProd - ? 'Dev session endpoint not available in production' - : 'Dev session endpoint available' - }) -} - -/** - * POST /auth/dev-session - * Create authenticated session for test account - */ -export async function POST(request: NextRequest) { - // Environment guard - block production requests - if (isProductionEnvironment()) { - return NextResponse.json( - { error: 'Dev session endpoint not available in production' }, - { status: 403 } - ) - } - - try { - // Parse and validate request body - const body = await request.json() - const { email, tier } = DevSessionRequestSchema.parse(body) - - // Create Supabase admin client with service role key - const supabase = createClient( - process.env.NEXT_PUBLIC_SUPABASE_URL!, - process.env.SUPABASE_SERVICE_ROLE_KEY!, - { - auth: { - autoRefreshToken: false, - persistSession: false - } - } - ) - - // Step 1: Create test user (idempotent via email uniqueness) - const { data: createData, error: createError } = await supabase.auth.admin.createUser({ - email, - email_confirm: true, // Skip email confirmation - user_metadata: { - test_account: true, - tier, - created_via: 'dev-session-endpoint' - } - }) - - // Handle duplicate user gracefully (user already exists) - if (createError && !createError.message.includes('User already registered')) { - process.stderr.write(`[dev-session] User creation failed: ${createError.message}\n`) - return NextResponse.json( - { error: 'Failed to create test user', details: createError.message }, - { status: 500 } - ) - } - - // Step 2: Generate magic link with hashed token (PKCE flow) - const { data: linkData, error: linkError } = await supabase.auth.admin.generateLink({ - type: 'magiclink', - email - }) - - if (linkError || !linkData) { - process.stderr.write(`[dev-session] Magic link generation failed: ${linkError?.message}\n`) - return NextResponse.json( - { error: 'Failed to generate magic link', details: linkError?.message }, - { status: 500 } - ) - } - - // Extract hashed token for OTP verification - const properties = linkData.properties as any - const hashed_token = properties.hashed_token as string - const userId = linkData.user.id - - if (!hashed_token) { - process.stderr.write('[dev-session] No hashed_token in generateLink response\n') - return NextResponse.json( - { error: 'Failed to generate session tokens', details: 'No hashed_token returned' }, - { status: 500 } - ) - } - - // Step 3: Exchange hashed token for actual session tokens via verifyOtp - const { data: otpData, error: otpError } = await supabase.auth.verifyOtp({ - token_hash: hashed_token, - type: 'magiclink' - }) - - if (otpError || !otpData?.session) { - process.stderr.write(`[dev-session] Token verification failed: ${otpError?.message}\n`) - return NextResponse.json( - { error: 'Failed to verify session tokens', details: otpError?.message }, - { status: 500 } - ) - } - - // Extract session tokens from verified OTP - const access_token = otpData.session.access_token - const refresh_token = otpData.session.refresh_token - const expires_in = otpData.session.expires_in || 3600 - const expires_at = otpData.session.expires_at || Math.floor(Date.now() / 1000) + expires_in - - // Step 4: Generate API key via backend endpoint (non-blocking) - let apiKey: string | undefined 
- - try { - const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3000' - const keyResponse = await fetch(`${apiUrl}/api/keys/generate`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${access_token}` - } - }) - - if (keyResponse.ok) { - const keyData = await keyResponse.json() - apiKey = keyData.apiKey || keyData.message - process.stdout.write(`[dev-session] API key generated successfully for ${email}\n`) - } else { - const errorText = await keyResponse.text() - process.stderr.write(`[dev-session] API key generation failed (${keyResponse.status}): ${errorText}\n`) - } - } catch (error) { - process.stderr.write(`[dev-session] API key generation error: ${error}\n`) - // Continue without API key - partial response still useful - } - - // Step 5: Return complete session data - const response: DevSessionResponse = { - userId, - email, - session: { - access_token, - refresh_token, - expires_in, - expires_at - }, - apiKey, - message: 'Session created successfully' - } - - process.stdout.write(`[dev-session] Created session for ${email} (tier: ${tier})\n`) - return NextResponse.json(response) - - } catch (error: any) { - if (error instanceof z.ZodError) { - return NextResponse.json( - { error: 'Invalid request body', details: error.issues }, - { status: 400 } - ) - } - - process.stderr.write(`[dev-session] Unexpected error: ${error}\n`) - return NextResponse.json( - { error: 'Internal server error', details: String(error) }, - { status: 500 } - ) - } -} diff --git a/web/app/auth/logout/route.ts b/web/app/auth/logout/route.ts deleted file mode 100644 index 26522f5e..00000000 --- a/web/app/auth/logout/route.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { createClient } from '@/lib/supabase-server' -import { NextResponse } from 'next/server' - -export async function POST(request: Request) { - const requestUrl = new URL(request.url) - const origin = requestUrl.origin - - const supabase = createClient() - await supabase.auth.signOut() - - return NextResponse.redirect(`${origin}/`) -} diff --git a/web/app/dashboard/page.tsx b/web/app/dashboard/page.tsx deleted file mode 100644 index 6d205fb7..00000000 --- a/web/app/dashboard/page.tsx +++ /dev/null @@ -1,594 +0,0 @@ -'use client' - -import { useAuth } from '@/context/AuthContext' -import { useState, useEffect, Suspense } from 'react' -import { useRouter } from 'next/navigation' -import type { CreatePortalSessionResponse } from '@shared/types/api' -import KeyResetModal from '@/components/KeyResetModal' -import KeyRevokeModal from '@/components/KeyRevokeModal' - -interface KeyMetadata { - keyId: string - tier: string - rateLimitPerHour: number - createdAt: string - lastUsedAt: string | null - enabled: boolean -} - -function DashboardContent() { - const { user, subscription, apiKey, setApiKey, isLoading, session } = useAuth() - const [loadingPortal, setLoadingPortal] = useState(false) - const [billingError, setBillingError] = useState(null) - const [loadingKeyGen, setLoadingKeyGen] = useState(false) - const [copiedKey, setCopiedKey] = useState(false) - const [keyGenError, setKeyGenError] = useState(null) - const [keyGenSuccess, setKeyGenSuccess] = useState(null) - const [keyMetadata, setKeyMetadata] = useState(null) - const [loadingMetadata, setLoadingMetadata] = useState(false) - const [metadataError, setMetadataError] = useState(null) - const [showResetModal, setShowResetModal] = useState(false) - const [showRevokeModal, setShowRevokeModal] = useState(false) - const router = useRouter() - - 
const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3000' - - // Fetch API key from localStorage if user is authenticated but context doesn't have key yet - useEffect(() => { - const fetchApiKeyFromLocalStorage = async () => { - if (!user || apiKey || isLoading) { - return - } - - // Check localStorage for the API key secret - // Note: API key secrets are only shown once at generation and stored in localStorage - // If localStorage is cleared, users must reset their key to retrieve a new secret - const storedKey = localStorage.getItem('kotadb_api_key') - if (storedKey) { - setApiKey(storedKey) - } - } - - fetchApiKeyFromLocalStorage() - }, [user, apiKey, isLoading, setApiKey]) - - // Fetch key metadata when user is authenticated and has an API key - useEffect(() => { - if (user && apiKey) { - fetchKeyMetadata() - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [user, apiKey]) - - const handleManageBilling = async () => { - setBillingError(null) - setLoadingPortal(true) - - if (!session?.access_token) { - setBillingError('Authentication failed. Please refresh and try again.') - process.stderr.write('No session available for billing portal request\n') - setLoadingPortal(false) - return - } - - try { - const response = await fetch(`${apiUrl}/api/subscriptions/create-portal-session`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${session.access_token}`, - }, - body: JSON.stringify({ - returnUrl: window.location.href, - }), - }) - - if (response.ok) { - const data: CreatePortalSessionResponse = await response.json() - window.location.href = data.url - } else if (response.status === 401) { - setBillingError('Authentication failed. Please refresh and try again.') - process.stderr.write('Billing portal auth failed: 401 Unauthorized\n') - } else if (response.status === 404) { - setBillingError('No subscription found. Please contact support.') - process.stderr.write('Billing portal failed: No subscription found\n') - } else { - setBillingError('Failed to open billing portal. Please try again.') - const errorData = await response.json().catch(() => ({})) - process.stderr.write(`Billing portal error: ${JSON.stringify(errorData)}\n`) - } - } catch (error) { - setBillingError('Failed to open billing portal. Please try again.') - process.stderr.write(`Error creating portal session: ${error instanceof Error ? 
error.message : String(error)}\n`) - } finally { - setLoadingPortal(false) - } - } - - const handleGenerateApiKey = async () => { - setLoadingKeyGen(true) - setKeyGenError(null) - setKeyGenSuccess(null) - - try { - // Get the current session - const { createClient } = await import('@/lib/supabase') - const supabase = createClient() - const { data: { session }, error: sessionError } = await supabase.auth.getSession() - - if (sessionError || !session) { - setKeyGenError('You must be logged in to generate an API key') - return - } - - const response = await fetch(`${apiUrl}/api/keys/generate`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${session.access_token}`, - }, - }) - - if (response.ok) { - const keyData = await response.json() as { - apiKey?: string - keyId: string - message?: string - } - - if (keyData.apiKey) { - // New key generated - setApiKey(keyData.apiKey) - setKeyGenSuccess('API key successfully generated!') - // Auto-refresh metadata to show new key info - await fetchKeyMetadata() - } else if (keyData.message?.includes('already exists')) { - // Key already exists - fetch metadata to display it - setKeyGenError('You already have an API key. Fetching details...') - try { - await fetchKeyMetadata() - // Clear error and show success message after successful fetch - setKeyGenError(null) - setKeyGenSuccess('API key already exists and is active') - } catch (fetchError) { - // If fetch fails, update error message - setKeyGenError('You already have an API key. Please refresh the page to view details.') - } - } - } else { - const errorData = await response.json() as { error?: string } - setKeyGenError(errorData.error || 'Failed to generate API key') - } - } catch (error) { - process.stderr.write(`Error generating API key: ${error instanceof Error ? error.message : String(error)}\n`) - setKeyGenError('An unexpected error occurred. 
Please try again.') - } finally { - setLoadingKeyGen(false) - } - } - - const copyApiKey = () => { - if (apiKey) { - navigator.clipboard.writeText(apiKey) - setCopiedKey(true) - setTimeout(() => setCopiedKey(false), 2000) - } - } - - const fetchKeyMetadata = async () => { - setLoadingMetadata(true) - setMetadataError(null) - try { - const { createClient } = await import('@/lib/supabase') - const supabase = createClient() - const { data: { session }, error: sessionError } = await supabase.auth.getSession() - - if (sessionError || !session) { - setMetadataError('You must be logged in to view API key metadata') - return - } - - const response = await fetch(`${apiUrl}/api/keys/current`, { - headers: { - 'Authorization': `Bearer ${session.access_token}`, - }, - }) - - if (response.ok) { - const data = await response.json() as KeyMetadata - setKeyMetadata(data) - } else if (response.status === 404) { - setKeyMetadata(null) // No key exists - } else { - setMetadataError('Failed to load API key metadata') - } - } catch (error) { - setMetadataError('An unexpected error occurred') - } finally { - setLoadingMetadata(false) - } - } - - const handleResetApiKey = async () => { - try { - const { createClient } = await import('@/lib/supabase') - const supabase = createClient() - const { data: { session }, error: sessionError } = await supabase.auth.getSession() - - if (sessionError || !session) { - throw new Error('You must be logged in to reset your API key') - } - - const response = await fetch(`${apiUrl}/api/keys/reset`, { - method: 'POST', - headers: { - 'Authorization': `Bearer ${session.access_token}`, - }, - }) - - if (response.ok) { - const data = await response.json() - localStorage.setItem('kotadb_api_key', data.apiKey) - await fetchKeyMetadata() - return { apiKey: data.apiKey } - } else if (response.status === 429) { - const retryAfter = response.headers.get('Retry-After') - throw new Error(`Rate limit exceeded. 
Please try again in ${retryAfter} seconds.`) - } else { - const errorData = await response.json() - throw new Error(errorData.error || 'Failed to reset API key') - } - } catch (error: any) { - throw error - } - } - - const handleRevokeApiKey = async () => { - try { - const { createClient } = await import('@/lib/supabase') - const supabase = createClient() - const { data: { session }, error: sessionError } = await supabase.auth.getSession() - - if (sessionError || !session) { - throw new Error('You must be logged in to revoke your API key') - } - - const response = await fetch(`${apiUrl}/api/keys/current`, { - method: 'DELETE', - headers: { - 'Authorization': `Bearer ${session.access_token}`, - }, - }) - - if (response.ok) { - localStorage.removeItem('kotadb_api_key') - setKeyMetadata(null) - setKeyGenSuccess('API key revoked successfully') - // Reload to update auth context - window.location.reload() - } else { - const errorData = await response.json() - throw new Error(errorData.error || 'Failed to revoke API key') - } - } catch (error: any) { - throw error - } - } - - const formatDate = (dateString: string | null) => { - if (!dateString) return 'N/A' - return new Date(dateString).toLocaleDateString('en-US', { - year: 'numeric', - month: 'long', - day: 'numeric', - }) - } - - const formatRelativeTime = (dateString: string) => { - const date = new Date(dateString) - const now = new Date() - const diffMs = now.getTime() - date.getTime() - const diffMins = Math.floor(diffMs / 60000) - const diffHours = Math.floor(diffMs / 3600000) - const diffDays = Math.floor(diffMs / 86400000) - - if (diffMins < 1) return 'Just now' - if (diffMins < 60) return `${diffMins} minute${diffMins > 1 ? 's' : ''} ago` - if (diffHours < 24) return `${diffHours} hour${diffHours > 1 ? 's' : ''} ago` - return `${diffDays} day${diffDays > 1 ? 's' : ''} ago` - } - - const getStatusBadgeColor = (status: string) => { - switch (status) { - case 'active': - return 'bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200' - case 'trialing': - return 'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200' - case 'past_due': - return 'bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200' - case 'canceled': - return 'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200' - default: - return 'bg-gray-100 text-gray-800 dark:bg-gray-800 dark:text-gray-200' - } - } - - if (isLoading) { - return ( -
-      <div>Loading...</div>
-    )
-  }
-
-  return (
-    <div>
-      {/* NOTE: markup reconstructed; the original JSX tags and class names were lost in extraction */}
-      <h1>Dashboard</h1>
-
-      {/* User Profile Section */}
-      <section>
-        <h2>Profile</h2>
-        <div>
-          <span>Email:</span> <span>{user?.email || 'N/A'}</span>
-        </div>
-        <div>
-          <span>GitHub Username:</span>{' '}
-          <span>{user?.user_metadata?.user_name || 'N/A'}</span>
-        </div>
-      </section>
-
-      {/* Subscription Section */}
-      <section>
-        <div>
-          <h2>Subscription</h2>
-          {subscription && subscription.tier !== 'free' && (
-            <button onClick={handleManageBilling} disabled={loadingPortal}>
-              {loadingPortal ? 'Opening...' : 'Manage Billing'}
-            </button>
-          )}
-        </div>
-
-        {billingError && <div>{billingError}</div>}
-
-        {subscription ? (
-          <div>
-            <div>
-              <span>Tier:</span> <span>{subscription.tier.toUpperCase()}</span>
-            </div>
-            <div>
-              <span>Status:</span>{' '}
-              <span className={getStatusBadgeColor(subscription.status)}>
-                {subscription.status.toUpperCase()}
-              </span>
-            </div>
-            <div>
-              <span>Current Period:</span>{' '}
-              <span>
-                {formatDate(subscription.current_period_start)} - {formatDate(subscription.current_period_end)}
-              </span>
-            </div>
-            {subscription.cancel_at_period_end && (
-              <p>
-                Your subscription will be canceled at the end of the current billing period.
-              </p>
-            )}
-          </div>
-        ) : (
-          <div>
-            <p>You are on the free tier</p>
-            <a href="/pricing">Upgrade to Solo or Team</a>
-          </div>
-        )}
-      </section>
-
-      {/* MCP Configuration Section */}
-      {apiKey && (
-        <section>
-          <h2>MCP Configuration</h2>
-          <p>Integrate KotaDB with Claude Code CLI using MCP (Model Context Protocol)</p>
-          <a href="/mcp">View MCP configuration</a>
-        </section>
-      )}
-
-      {/* API Keys Section */}
-      <section>
-        <h2>API Keys</h2>
-
-        {/* Success Message */}
-        {keyGenSuccess && <div>{keyGenSuccess}</div>}
-
-        {/* Error Message */}
-        {keyGenError && !keyMetadata && !apiKey && <div>{keyGenError}</div>}
-
-        {/* Key Metadata Card */}
-        {loadingMetadata && <div>Loading key metadata...</div>}
-
-        {metadataError && <div>{metadataError}</div>}
-
-        {keyMetadata && (
-          <div>
-            <div><span>Key ID:</span> <span>{keyMetadata.keyId.substring(0, 8)}...</span></div>
-            <div><span>Tier:</span> <span>{keyMetadata.tier.toUpperCase()}</span></div>
-            <div><span>Rate Limit:</span> <span>{keyMetadata.rateLimitPerHour} requests/hour</span></div>
-            <div><span>Created:</span> <span>{formatDate(keyMetadata.createdAt)}</span></div>
-            <div>
-              <span>Last Used:</span>{' '}
-              <span>{keyMetadata.lastUsedAt ? formatRelativeTime(keyMetadata.lastUsedAt) : 'Never used'}</span>
-            </div>
-            <div><span>Status:</span> <span>Active</span></div>
-          </div>
-        )}
-
-        {apiKey ? (
-          <div>
-            <div>
-              <code>{apiKey.substring(0, 20)}...{apiKey.substring(apiKey.length - 10)}</code>
-              <button onClick={copyApiKey}>{copiedKey ? 'Copied!' : 'Copy'}</button>
-            </div>
-            <p>Use this API key to authenticate requests to the KotaDB API</p>
-
-            {/* Key Management Buttons */}
-            <div>
-              <button onClick={() => setShowResetModal(true)}>Reset Key</button>
-              <button onClick={() => setShowRevokeModal(true)}>Revoke Key</button>
-            </div>
-          </div>
-        ) : (
-          <div>
-            <p>No API key configured</p>
-            <button onClick={handleGenerateApiKey} disabled={loadingKeyGen}>
-              {loadingKeyGen ? 'Generating...' : 'Generate API Key'}
-            </button>
-            <p>Click the button above to generate your first API key</p>
-          </div>
-        )}
-      </section>
-
-      {/* Modals */}
-      <KeyResetModal
-        isOpen={showResetModal}
-        onClose={() => setShowResetModal(false)}
-        onReset={handleResetApiKey}
-      />
-      <KeyRevokeModal
-        isOpen={showRevokeModal}
-        onClose={() => setShowRevokeModal(false)}
-        onRevoke={handleRevokeApiKey}
-      />
- ) -} - -export default function DashboardPage() { - return ( - -
-    <Suspense fallback={<div>Loading...</div>}>
-      <DashboardContent />
-    </Suspense>
- ) -} diff --git a/web/app/globals.css b/web/app/globals.css deleted file mode 100644 index 102574d5..00000000 --- a/web/app/globals.css +++ /dev/null @@ -1,155 +0,0 @@ -@tailwind base; -@tailwind components; -@tailwind utilities; - -:root { - --background: #ffffff; - --foreground: #171717; - - /* Glass design tokens */ - --glass-blur-sm: 4px; - --glass-blur-md: 10px; - --glass-blur-lg: 16px; - --glass-opacity-light: 0.7; - --glass-opacity-dark: 0.5; - --glass-border-light: rgba(255, 255, 255, 0.18); - --glass-border-dark: rgba(255, 255, 255, 0.12); - --glass-bg-light: rgba(255, 255, 255, 0.7); - --glass-bg-dark: rgba(255, 255, 255, 0.05); -} - -@media (prefers-color-scheme: dark) { - :root { - --background: #0a0a0a; - --foreground: #ededed; - --glass-bg-light: rgba(10, 10, 10, 0.7); - --glass-bg-dark: rgba(0, 0, 0, 0.5); - } -} - -/* Fallback for reduced transparency preference */ -@media (prefers-reduced-transparency: reduce) { - :root { - --glass-opacity-light: 0.95; - --glass-opacity-dark: 0.95; - --glass-bg-light: rgba(255, 255, 255, 0.95); - --glass-bg-dark: rgba(255, 255, 255, 0.95); - } - - @media (prefers-color-scheme: dark) { - :root { - --glass-bg-light: rgba(10, 10, 10, 0.95); - --glass-bg-dark: rgba(0, 0, 0, 0.95); - } - } -} - -body { - color: var(--foreground); - background: var(--background); - font-family: -apple-system, BlinkMacSystemFont, "SF Pro Display", "SF Pro Text", "Helvetica Neue", system-ui, sans-serif; -} - -/* Typography scale */ -h1 { - font-size: 3rem; - font-weight: 700; - line-height: 1.2; - letter-spacing: -0.02em; -} - -h2 { - font-size: 2.25rem; - font-weight: 700; - line-height: 1.3; - letter-spacing: -0.01em; -} - -h3 { - font-size: 1.875rem; - font-weight: 600; - line-height: 1.4; -} - -h4 { - font-size: 1.5rem; - font-weight: 600; - line-height: 1.4; -} - -h5 { - font-size: 1.25rem; - font-weight: 600; - line-height: 1.5; -} - -h6 { - font-size: 1rem; - font-weight: 600; - line-height: 1.5; -} - -p { - font-size: 1rem; - line-height: 1.6; - letter-spacing: 0.01em; -} - -code, pre { - font-family: "SF Mono", "Fira Code", "JetBrains Mono", "Menlo", "Monaco", monospace; -} - -@layer utilities { - .text-balance { - text-wrap: balance; - } - - /* Glass utility classes */ - .glass-light { - background: var(--glass-bg-light); - backdrop-filter: blur(var(--glass-blur-md)) saturate(180%); - -webkit-backdrop-filter: blur(var(--glass-blur-md)) saturate(180%); - border: 1px solid var(--glass-border-light); - } - - .glass-dark { - background: var(--glass-bg-dark); - backdrop-filter: blur(var(--glass-blur-md)) saturate(180%); - -webkit-backdrop-filter: blur(var(--glass-blur-md)) saturate(180%); - border: 1px solid var(--glass-border-dark); - } - - .glass-modal { - background: var(--glass-bg-light); - backdrop-filter: blur(var(--glass-blur-lg)) saturate(180%); - -webkit-backdrop-filter: blur(var(--glass-blur-lg)) saturate(180%); - border: 1px solid var(--glass-border-light); - } - - /* Fallback for browsers without backdrop-filter support */ - @supports not (backdrop-filter: blur(10px)) { - .glass-light, - .glass-dark, - .glass-modal { - background: rgba(255, 255, 255, 0.95); - } - - @media (prefers-color-scheme: dark) { - .glass-light, - .glass-dark, - .glass-modal { - background: rgba(10, 10, 10, 0.95); - } - } - } - - /* Fallback for reduced transparency */ - @media (prefers-reduced-transparency: reduce) { - .glass-light, - .glass-dark, - .glass-modal { - backdrop-filter: none; - -webkit-backdrop-filter: none; - } - } -} diff --git a/web/app/layout.tsx 
b/web/app/layout.tsx deleted file mode 100644 index c16ddb5b..00000000 --- a/web/app/layout.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import type { Metadata } from 'next' -import './globals.css' -import { AuthProvider } from '@/context/AuthContext' -import Navigation from '@/components/Navigation' -import { Analytics } from '@vercel/analytics/next' -import { SpeedInsights } from '@vercel/speed-insights/next' - -export const metadata: Metadata = { - title: 'KotaDB - Code Intelligence Platform', - description: 'Search and index code repositories with KotaDB', -} - -export default function RootLayout({ - children, -}: Readonly<{ - children: React.ReactNode -}>) { - return ( - - - -
      {/* className props omitted */}
      <AuthProvider>
        <Navigation />
        <main>{children}</main>
        <footer>
          KotaDB v0.1.0 - Code Intelligence Platform
        </footer>
      </AuthProvider>
      <Analytics />
      <SpeedInsights />
- - - - - ) -} diff --git a/web/app/login/page.tsx b/web/app/login/page.tsx deleted file mode 100644 index b95e7cd4..00000000 --- a/web/app/login/page.tsx +++ /dev/null @@ -1,84 +0,0 @@ -'use client' - -import { createClient } from '@/lib/supabase' -import { useRouter } from 'next/navigation' -import { useEffect, useState } from 'react' - -export default function LoginPage() { - const router = useRouter() - const [loading, setLoading] = useState(true) - const supabase = createClient() - - useEffect(() => { - // Check if user is already logged in - supabase.auth.getSession().then(({ data }: { data: { session: unknown } }) => { - if (data.session) { - router.push('/dashboard') - } else { - setLoading(false) - } - }) - }, [router, supabase.auth]) - - const handleGitHubLogin = async () => { - setLoading(true) - const { error } = await supabase.auth.signInWithOAuth({ - provider: 'github', - options: { - redirectTo: `${window.location.origin}/auth/callback`, - scopes: 'user:email', - }, - }) - - if (error) { - process.stderr.write(`Error logging in: ${error.message}\n`) - setLoading(false) - } - } - - if (loading) { - return ( -
-
-
-

Loading...

-
-
- ) - } - - return ( -
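      /* Sign-in screen: GitHub OAuth is the only provider (handleGitHubLogin above; scope: user:email) */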
-
-
-

KotaDB

-

- Sign in to access your dashboard and manage your API keys -

-
- -
- -
- -
-

- By signing in, you agree to our Terms of Service and Privacy Policy -

-
-
-
- ) -} diff --git a/web/app/mcp/page.tsx b/web/app/mcp/page.tsx deleted file mode 100644 index 53ab1ca0..00000000 --- a/web/app/mcp/page.tsx +++ /dev/null @@ -1,356 +0,0 @@ -'use client' - -import { useAuth } from '@/context/AuthContext' -import { Suspense, useState, useEffect } from 'react' -import { useRouter } from 'next/navigation' -import ConfigurationDisplay from '@/components/mcp/ConfigurationDisplay' -import CopyButton from '@/components/mcp/CopyButton' -import ToolReference from '@/components/mcp/ToolReference' - -type ConfigType = 'global' | 'project' - -function MCPContent() { - const { user, apiKey, setApiKey, isLoading } = useAuth() - const router = useRouter() - const [selectedTab, setSelectedTab] = useState('global') - const [showKey, setShowKey] = useState(false) - const [copiedConfig, setCopiedConfig] = useState(null) - const [loadingKey, setLoadingKey] = useState(false) - const [keyFetchError, setKeyFetchError] = useState(null) - - const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3000' - - // Fetch API key from backend if user is authenticated but context doesn't have key yet - useEffect(() => { - const fetchApiKeyFromBackend = async () => { - if (!user || apiKey || loadingKey || isLoading) { - return - } - - // Check localStorage first as immediate fallback - const storedKey = localStorage.getItem('kotadb_api_key') - if (storedKey) { - setApiKey(storedKey) - return - } - - setLoadingKey(true) - setKeyFetchError(null) - - try { - const { createClient } = await import('@/lib/supabase') - const supabase = createClient() - const { data: { session }, error: sessionError } = await supabase.auth.getSession() - - if (sessionError || !session) { - setKeyFetchError('You must be logged in to view API key') - setLoadingKey(false) - return - } - - const response = await fetch(`${apiUrl}/api/keys/current`, { - headers: { - 'Authorization': `Bearer ${session.access_token}`, - }, - }) - - if (response.ok) { - const data = await response.json() - // API key metadata endpoint doesn't return the secret, so we need to check localStorage - // or prompt user to visit dashboard to see their key - if (storedKey) { - setApiKey(storedKey) - } - } else if (response.status === 404) { - // No key exists - this is expected, not an error - setKeyFetchError(null) - } else { - setKeyFetchError('Failed to load API key. 
Please try refreshing the page.') - } - } catch (error) { - setKeyFetchError('An unexpected error occurred while loading your API key.') - } finally { - setLoadingKey(false) - } - } - - fetchApiKeyFromBackend() - }, [user, apiKey, isLoading, loadingKey, apiUrl, setApiKey]) - - const generateConfiguration = (type: ConfigType) => { - if (!apiKey) return '' - - const config = { - mcpServers: { - kotadb: { - type: 'http', - url: `${apiUrl}/mcp`, - headers: { - Authorization: `Bearer ${apiKey}` - } - } - } - } - - return JSON.stringify(config, null, 2) - } - - const handleCopy = async (type: ConfigType) => { - const config = generateConfiguration(type) - try { - await navigator.clipboard.writeText(config) - setCopiedConfig(type) - setTimeout(() => setCopiedConfig(null), 2000) - } catch (error) { - // Fallback for older browsers - try { - const textArea = document.createElement('textarea') - textArea.value = config - document.body.appendChild(textArea) - textArea.select() - document.execCommand('copy') - document.body.removeChild(textArea) - setCopiedConfig(type) - setTimeout(() => setCopiedConfig(null), 2000) - } catch (fallbackError) { - process.stderr.write(`Failed to copy configuration: ${fallbackError instanceof Error ? fallbackError.message : String(fallbackError)}\n`) - } - } - } - - if (isLoading || loadingKey) { - return ( -
-
-
- ) - } - - return ( -
-
-
-
-

- MCP Configuration for Claude Code -

-

- Copy and paste this configuration to integrate KotaDB with Claude Code CLI -

-
- - {keyFetchError && ( -
-
- -
-

- Error Loading API Key -

-

- {keyFetchError} -

- -
- )} - - {!apiKey && !keyFetchError ? ( -
-
- -
-

- No API Key Generated -

-

- You need to generate an API key before you can configure MCP integration -

- -
- ) : ( - <> - {/* Tabs */} -
-
- -
- -
- {/* Configuration Info */} -
-

- {selectedTab === 'global' ? ( - <> - Global Configuration: Save to ~/.claude/mcp.json to use KotaDB in all Claude Code sessions - - ) : ( - <> - Project Configuration: Save to .mcp.json in your project root to use KotaDB only in this project - - )} -

-
- - {/* Configuration Display */} - - - {/* Action Buttons */} -
- - handleCopy(selectedTab)} - copied={copiedConfig === selectedTab} - /> -
-
-
- - {/* Setup Instructions */} -
-

- Setup Instructions -

-
  <ol>
    <li>
      <span>1</span>
      Click "Copy Configuration" button above
    </li>
    <li>
      <span>2</span>
      Save the configuration to:
      <ul>
        <li><strong>macOS/Linux:</strong> <code>~/.claude/mcp.json</code> (global) or <code>.mcp.json</code> (project)</li>
        <li><strong>Windows:</strong> <code>%USERPROFILE%\.claude\mcp.json</code> (global) or <code>.mcp.json</code> (project)</li>
      </ul>
    </li>
    <li>
      <span>3</span>
      Verify the configuration: <code>claude mcp list</code>
      (you should see "kotadb" in the list of available MCP servers)
    </li>
    <li>
      <span>4</span>
      Start using KotaDB tools in Claude Code!
    </li>
  </ol>
- -
-

- Troubleshooting -

-
  <ul>
    <li><strong>Connection failed:</strong> Verify your API key is valid and the server URL is correct</li>
    <li><strong>401 Unauthorized:</strong> Your API key may have been revoked or is invalid</li>
    <li><strong>429 Rate Limited:</strong> You have exceeded your tier's rate limit. Upgrade or wait for the rate limit to reset</li>
  </ul>
-
-
- - {/* Available Tools */} - - - )} -
-
-
- ) -} - -export default function MCPPage() { - return ( - -
- - }> - -
- ) -} diff --git a/web/app/page.tsx b/web/app/page.tsx deleted file mode 100644 index 9cfdd8b7..00000000 --- a/web/app/page.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import FeatureShowcase from "@/components/FeatureShowcase"; -import LandingHero from "@/components/LandingHero"; -import UserJourney from "@/components/UserJourney"; -import Link from "next/link"; - -export default function Home() { - return ( -
- - - - - {/* CTA Section */} -
-
-

- Make Your AI Smarter in 30 Seconds -

-

- Join developers who've given Claude Code a searchable memory of - their codebase. Free tier. No credit card. Start now. -

-
- - Get Started for Free - -
-
-
-
- ); -} diff --git a/web/app/pricing/page.tsx b/web/app/pricing/page.tsx deleted file mode 100644 index 7d337d90..00000000 --- a/web/app/pricing/page.tsx +++ /dev/null @@ -1,225 +0,0 @@ -'use client' - -import { useAuth } from '@/context/AuthContext' -import { useState } from 'react' -import type { CreateCheckoutSessionResponse } from '@shared/types/api' - -export default function PricingPage() { - const { isAuthenticated, subscription, session } = useAuth() - const [loadingTier, setLoadingTier] = useState(null) - const [error, setError] = useState(null) - - const handleUpgrade = async (tier: 'solo' | 'team') => { - if (!isAuthenticated || !session) { - window.location.href = '/login' - return - } - - setLoadingTier(tier) - setError(null) - try { - const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3000' - const response = await fetch(`${apiUrl}/api/subscriptions/create-checkout-session`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${session.access_token}`, - }, - body: JSON.stringify({ - tier, - successUrl: `${window.location.origin}/dashboard?upgrade=success`, - cancelUrl: `${window.location.origin}/pricing?upgrade=canceled`, - }), - }) - - if (response.ok) { - const data: CreateCheckoutSessionResponse = await response.json() - window.location.href = data.url - } else { - const errorData = await response.json().catch(() => ({ error: 'Failed to create checkout session' })) - const errorMessage = errorData.error || 'Failed to create checkout session' - setError(errorMessage) - } - } catch (error) { - const message = error instanceof Error ? error.message : 'Network error. Please try again.' - setError(message) - } finally { - setLoadingTier(null) - } - } - - const isCurrentPlan = (tier: string) => { - return subscription?.tier === tier - } - - const tiers = [ - { - name: 'Free', - tier: 'free', - price: '$0', - period: 'forever', - description: 'Perfect for getting started', - features: [ - '1,000 requests per hour', - '5,000 requests per day', - 'Basic code search', - 'Repository indexing', - 'Community support', - ], - cta: 'Get Started', - highlighted: false, - }, - { - name: 'Solo', - tier: 'solo', - price: '$29.99', - period: 'per month', - description: 'For individual developers', - features: [ - '5,000 requests per hour', - '25,000 requests per day', - 'Advanced code search', - 'Unlimited repositories', - 'Priority support', - 'API access', - ], - cta: 'Upgrade to Solo', - highlighted: true, - }, - { - name: 'Team', - tier: 'team', - price: '$49.99', - period: 'per month', - description: 'For development teams', - features: [ - '25,000 requests per hour', - '100,000 requests per day', - 'Advanced code search', - 'Unlimited repositories', - 'Priority support', - 'API access', - 'Team collaboration', - 'Dedicated support', - ], - cta: 'Upgrade to Team', - highlighted: false, - }, - ] - - return ( -
-
-
-

- Choose Your Plan -

-

- Select the perfect plan for your needs -

- {error && ( -
-
- - - -
-

{error}

- -
-
-
- )} -
- -
- {tiers.map((tier) => ( -
- {tier.highlighted && ( -
- Most Popular -
- )} - -
-

- {tier.name} -

-

{tier.description}

- -
- - {tier.price} - - {tier.period} -
- -
    - {tier.features.map((feature, index) => ( -
  • - - - - {feature} -
  • - ))} -
- - {isCurrentPlan(tier.tier) ? ( -
- Current Plan -
- ) : tier.tier === 'free' ? ( - - {tier.cta} - - ) : ( - - )} -
-
- ))} -
-
-
- ) -} diff --git a/web/components/ApiKeyInput.tsx b/web/components/ApiKeyInput.tsx deleted file mode 100644 index 5067d2d7..00000000 --- a/web/components/ApiKeyInput.tsx +++ /dev/null @@ -1,91 +0,0 @@ -'use client' - -import { useState } from 'react' -import { useAuth } from '@/context/AuthContext' - -export default function ApiKeyInput() { - const { apiKey, setApiKey, isAuthenticated } = useAuth() - const [isEditing, setIsEditing] = useState(false) - const [inputValue, setInputValue] = useState('') - - const handleSave = () => { - if (inputValue.trim()) { - setApiKey(inputValue.trim()) - setInputValue('') - setIsEditing(false) - } - } - - const handleClear = () => { - setApiKey(null) - setInputValue('') - setIsEditing(false) - } - - const handleCancel = () => { - setInputValue('') - setIsEditing(false) - } - - if (!isEditing && !isAuthenticated) { - return ( - - ) - } - - if (!isEditing && isAuthenticated) { - return ( -
-
- API Key Set -
- - -
- ) - } - - return ( -
- setInputValue(e.target.value)} - placeholder="kota___" - className="px-3 py-2 rounded-md text-sm glass-light dark:glass-dark text-gray-900 dark:text-gray-100 placeholder:text-gray-600 dark:placeholder:text-gray-400 focus:outline-none focus:ring-2 focus:ring-blue-500 w-64 transition-all" - onKeyDown={(e) => { - if (e.key === 'Enter') handleSave() - if (e.key === 'Escape') handleCancel() - }} - /> - - -
- ) -} diff --git a/web/components/FeatureShowcase.tsx b/web/components/FeatureShowcase.tsx deleted file mode 100644 index af90ef54..00000000 --- a/web/components/FeatureShowcase.tsx +++ /dev/null @@ -1,58 +0,0 @@ -export default function FeatureShowcase() { - const features = [ - { - title: "Instant Code Search", - description: - "Your AI finds exactly what it needs in milliseconds—no more expensive file-by-file reading that burns through tokens and context windows", - icon: "🔍", - }, - { - title: "Dependency Mapping", - description: - "Know what breaks before changing anything. Your AI sees the full picture of how files connect, preventing breaking changes", - icon: "🔗", - }, - { - title: "Change Impact Analysis", - description: - "Validate changes before your AI writes them. Catch architectural conflicts and missing test coverage automatically", - icon: "⚡", - }, - { - title: "Works with Claude Code", - description: - "Drop in your API key and Claude Code gains instant access to your entire codebase structure—zero config, maximum intelligence", - icon: "🔌", - }, - ]; - - return ( -
-
-
-

- Everything Your AI Needs to Stop Hallucinating -

-

- Real code intelligence—not just embedding search -

-
- -
- {features.map((feature) => ( -
-
{feature.icon}
-

{feature.title}

-

- {feature.description} -

-
- ))} -
-
-
- ); -} diff --git a/web/components/KeyResetModal.tsx b/web/components/KeyResetModal.tsx deleted file mode 100644 index 2d5d84d7..00000000 --- a/web/components/KeyResetModal.tsx +++ /dev/null @@ -1,149 +0,0 @@ -'use client' - -import { useState } from 'react' - -interface KeyResetModalProps { - isOpen: boolean - onClose: () => void - onReset: () => Promise<{ apiKey: string } | null> -} - -export default function KeyResetModal({ isOpen, onClose, onReset }: KeyResetModalProps) { - const [understood, setUnderstood] = useState(false) - const [loading, setLoading] = useState(false) - const [newKey, setNewKey] = useState(null) - const [error, setError] = useState(null) - const [copied, setCopied] = useState(false) - - if (!isOpen) return null - - const handleReset = async () => { - setLoading(true) - setError(null) - try { - const result = await onReset() - if (result) { - setNewKey(result.apiKey) - } - } catch (err: any) { - setError(err.message || 'Failed to reset API key') - } finally { - setLoading(false) - } - } - - const handleCopy = () => { - if (newKey) { - navigator.clipboard.writeText(newKey) - setCopied(true) - setTimeout(() => setCopied(false), 2000) - } - } - - const handleClose = () => { - setUnderstood(false) - setLoading(false) - setNewKey(null) - setError(null) - setCopied(false) - onClose() - } - - return ( -
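    /* Two-phase modal: destructive-action confirmation gated by the "understood" checkbox state, then a one-time display of the new key with copy-to-clipboard */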
-
-

- Reset API Key -

- - {!newKey ? ( - <> -
-

- Resetting your API key will immediately invalidate your old key and break any existing integrations. This action cannot be undone. -

-
- - {error && ( -
-

{error}

-
- )} - -
- -
- -
- - -
- - ) : ( - <> -
-

- API key successfully reset! -

-
- -
-

- Save this key now. It won't be shown again. -

-
- -
- -
- - -
-
- - - - )} -
-
- ) -} diff --git a/web/components/KeyRevokeModal.tsx b/web/components/KeyRevokeModal.tsx deleted file mode 100644 index 1e0ddf8b..00000000 --- a/web/components/KeyRevokeModal.tsx +++ /dev/null @@ -1,95 +0,0 @@ -'use client' - -import { useState } from 'react' - -interface KeyRevokeModalProps { - isOpen: boolean - onClose: () => void - onRevoke: () => Promise -} - -export default function KeyRevokeModal({ isOpen, onClose, onRevoke }: KeyRevokeModalProps) { - const [loading, setLoading] = useState(false) - const [error, setError] = useState(null) - const [success, setSuccess] = useState(false) - - if (!isOpen) return null - - const handleRevoke = async () => { - setLoading(true) - setError(null) - try { - await onRevoke() - setSuccess(true) - } catch (err: any) { - setError(err.message || 'Failed to revoke API key') - } finally { - setLoading(false) - } - } - - const handleClose = () => { - setLoading(false) - setError(null) - setSuccess(false) - onClose() - } - - return ( -
-
-

- Revoke API Key -

- - {!success ? ( - <> -
-

- Revoking your API key will disable all API access. You will need to generate a new key to continue using KotaDB. -

-
- - {error && ( -
-

{error}

-
- )} - -
- - -
- - ) : ( - <> -
-

- API key revoked successfully. You can generate a new key from the dashboard. -

-
- - - - )} -
-
- ) -} diff --git a/web/components/LandingHero.tsx b/web/components/LandingHero.tsx deleted file mode 100644 index de34c6a0..00000000 --- a/web/components/LandingHero.tsx +++ /dev/null @@ -1,93 +0,0 @@ -'use client' - -import Link from 'next/link' -import { useAuth } from '@/context/AuthContext' -import { useState, useEffect } from 'react' - -export default function LandingHero() { - const { user } = useAuth() - const [apiStatus, setApiStatus] = useState<'checking' | 'healthy' | 'error'>('checking') - const [apiVersion, setApiVersion] = useState('') - - useEffect(() => { - const checkApiHealth = async () => { - try { - const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3000' - const response = await fetch(`${apiUrl}/health`) - - if (response.ok) { - const data = await response.json() - setApiStatus('healthy') - setApiVersion(data.version || 'unknown') - } else { - setApiStatus('error') - } - } catch (error) { - setApiStatus('error') - } - } - - checkApiHealth() - }, []) - - return ( -
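    /* Hero section with a live status badge; API health is fetched once on mount from GET /health */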
- {/* Background gradient */} -
- - {/* Content */} -
-

- Give Your AI a{' '} - - Searchable Memory - - {' '}of Your Codebase -

- -

- Stop your AI from guessing. KotaDB indexes your repositories so Claude Code can instantly find code, trace dependencies, and understand impact—without reading files one by one. -

- -
- {user ? ( - - Go to Dashboard - - ) : ( - - Get Started - - )} -
- - {/* API Status Badge */} -
-
- - {apiStatus === 'healthy' - ? `API: Healthy ${apiVersion && `(v${apiVersion})`}` - : apiStatus === 'error' - ? 'API: Unavailable' - : 'Checking API...' - } -
-
-
-
- ) -} diff --git a/web/components/Navigation.tsx b/web/components/Navigation.tsx deleted file mode 100644 index ea5eaaec..00000000 --- a/web/components/Navigation.tsx +++ /dev/null @@ -1,325 +0,0 @@ -'use client' - -import Link from 'next/link' -import { usePathname } from 'next/navigation' -import { useAuth } from '@/context/AuthContext' -import ApiKeyInput from './ApiKeyInput' -import RateLimitStatus from './RateLimitStatus' -import { useState, useEffect, useRef } from 'react' - -export default function Navigation() { - const pathname = usePathname() - const { isAuthenticated, user, subscription, signOut } = useAuth() - const [isMenuOpen, setIsMenuOpen] = useState(false) - const drawerRef = useRef(null) - const hamburgerRef = useRef(null) - - const isActive = (path: string) => pathname === path - - // Close menu on route change - useEffect(() => { - setIsMenuOpen(false) - }, [pathname]) - - // Handle escape key to close menu - useEffect(() => { - const handleEscape = (e: KeyboardEvent) => { - if (e.key === 'Escape' && isMenuOpen) { - setIsMenuOpen(false) - hamburgerRef.current?.focus() - } - } - document.addEventListener('keydown', handleEscape) - return () => document.removeEventListener('keydown', handleEscape) - }, [isMenuOpen]) - - // Focus management: trap focus inside drawer when open - useEffect(() => { - if (isMenuOpen && drawerRef.current) { - const focusableElements = drawerRef.current.querySelectorAll( - 'a[href], button:not([disabled])' - ) - const firstElement = focusableElements[0] as HTMLElement - const lastElement = focusableElements[focusableElements.length - 1] as HTMLElement - - const handleTab = (e: KeyboardEvent) => { - if (e.key !== 'Tab') return - - if (e.shiftKey) { - if (document.activeElement === firstElement) { - e.preventDefault() - lastElement?.focus() - } - } else { - if (document.activeElement === lastElement) { - e.preventDefault() - firstElement?.focus() - } - } - } - - firstElement?.focus() - document.addEventListener('keydown', handleTab) - return () => document.removeEventListener('keydown', handleTab) - } else if (!isMenuOpen && hamburgerRef.current && document.activeElement !== hamburgerRef.current) { - // Return focus to hamburger when closing drawer - hamburgerRef.current.focus() - } - }, [isMenuOpen]) - - const handleSignOut = async () => { - await signOut() - window.location.href = '/' - } - - const getTierBadgeColor = (tier: string) => { - switch (tier) { - case 'team': - return 'bg-purple-100 text-purple-800 dark:bg-purple-900 dark:text-purple-200' - case 'solo': - return 'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200' - default: - return 'bg-gray-100 text-gray-800 dark:bg-gray-800 dark:text-gray-200' - } - } - - return ( - <> - - - {/* Overlay backdrop */} - {isMenuOpen && ( -
setIsMenuOpen(false)} - aria-hidden="true" - /> - )} - - {/* Mobile drawer */} -
-
- {/* Logo section */} -
- - KotaDB - - -
- - {/* Navigation links */} -
- -
- - {/* User profile section */} - {isAuthenticated && ( -
-
-
- {user?.email || user?.user_metadata?.user_name || 'User'} -
- {subscription && ( - - {subscription.tier.toUpperCase()} - - )} -
- -
- )} -
-
- - ) -} diff --git a/web/components/RateLimitStatus.tsx b/web/components/RateLimitStatus.tsx deleted file mode 100644 index 65a14d7a..00000000 --- a/web/components/RateLimitStatus.tsx +++ /dev/null @@ -1,84 +0,0 @@ -'use client' - -import { useAuth } from '@/context/AuthContext' -import { useState, useEffect } from 'react' - -export default function RateLimitStatus() { - const { rateLimitInfo, isAuthenticated } = useAuth() - const [timeRemaining, setTimeRemaining] = useState('') - - useEffect(() => { - if (!rateLimitInfo) return - - const updateTimeRemaining = () => { - const now = Math.floor(Date.now() / 1000) - const seconds = Math.max(0, rateLimitInfo.reset - now) - - if (seconds <= 0) { - setTimeRemaining('Quota reset') - return - } - - const minutes = Math.floor(seconds / 60) - const remainingSeconds = seconds % 60 - - if (minutes > 0) { - setTimeRemaining(`${minutes}m ${remainingSeconds}s`) - } else { - setTimeRemaining(`${remainingSeconds}s`) - } - } - - updateTimeRemaining() - const interval = setInterval(updateTimeRemaining, 1000) - - return () => clearInterval(interval) - }, [rateLimitInfo]) - - if (!isAuthenticated || !rateLimitInfo) { - return null - } - - const percentage = (rateLimitInfo.remaining / rateLimitInfo.limit) * 100 - const isLow = percentage < 20 - const isCritical = percentage < 10 - - return ( -
-
-
- Rate Limit -
-
- {rateLimitInfo.remaining} / {rateLimitInfo.limit} -
-
- -
-
-
-
- {timeRemaining && ( -
- {timeRemaining} -
- )} -
-
- ) -} diff --git a/web/components/UserJourney.tsx b/web/components/UserJourney.tsx deleted file mode 100644 index c8222052..00000000 --- a/web/components/UserJourney.tsx +++ /dev/null @@ -1,59 +0,0 @@ -export default function UserJourney() { - const steps = [ - { - number: "1", - title: "Sign Up with GitHub", - description: "One-click OAuth—no passwords, no credit card, no friction", - }, - { - number: "2", - title: "Get Your API Key", - description: - "Free tier gives you 100 requests/hour to start. Upgrade as you grow", - }, - { - number: "3", - title: "Watch Your AI Get Smarter", - description: - "Paste your key into Claude Code. Instantly, your AI can search thousands of files, map dependencies, and validate changes—no more guessing", - }, - ]; - - return ( -
-
-
-

Get Started in 30 Seconds

-

- From signup to enhanced AI agents in three simple steps -

-
- -
- {steps.map((step, index) => ( -
-
-
-
- {step.number} -
-

{step.title}

-

- {step.description} -

-
-
- - {/* Arrow connector */} - {index < steps.length - 1 && ( -
- → -
- )} -
- ))} -
-
-
- ); -} diff --git a/web/components/mcp/ConfigurationDisplay.tsx b/web/components/mcp/ConfigurationDisplay.tsx deleted file mode 100644 index 393f648d..00000000 --- a/web/components/mcp/ConfigurationDisplay.tsx +++ /dev/null @@ -1,48 +0,0 @@ -'use client' - -interface ConfigurationDisplayProps { - configuration: string - showKey: boolean - apiKey: string -} - -export default function ConfigurationDisplay({ configuration, showKey, apiKey }: ConfigurationDisplayProps) { - const displayConfig = showKey - ? configuration - : configuration.replace(apiKey, '●●●●●●●●●●●●●●●●●●●●') - - const highlightJSON = (json: string) => { - // First, HTML-escape the entire JSON string to prevent XSS - const escaped = json - .replace(/&/g, '&') - .replace(//g, '>') - .replace(/"/g, '"') - .replace(/'/g, ''') - - // Then apply syntax highlighting to the escaped content - return escaped - .replace(/("(?:[^&]|&(?!quot;))*")\s*:/g, '$1:') - .replace(/:\s*("(?:[^&]|&(?!quot;))*")/g, ': $1') - .replace(/:\s*(true|false|null)/g, ': $1') - .replace(/:\s*(\d+)/g, ': $1') - } - - return ( -
-
-
-          
-        
-
-
- - JSON - -
-
- ) -} diff --git a/web/components/mcp/CopyButton.tsx b/web/components/mcp/CopyButton.tsx deleted file mode 100644 index 65c193b7..00000000 --- a/web/components/mcp/CopyButton.tsx +++ /dev/null @@ -1,52 +0,0 @@ -'use client' - -interface CopyButtonProps { - onClick: () => void - copied: boolean -} - -export default function CopyButton({ onClick, copied }: CopyButtonProps) { - return ( - - ) -} diff --git a/web/components/mcp/ToolReference.tsx b/web/components/mcp/ToolReference.tsx deleted file mode 100644 index a0933eb3..00000000 --- a/web/components/mcp/ToolReference.tsx +++ /dev/null @@ -1,98 +0,0 @@ -'use client' - -const tools = [ - { - name: 'search_code', - description: 'Search indexed code files for specific keywords or patterns. Returns matching files with context snippets.', - params: 'term (required), repository (optional), limit (optional)' - }, - { - name: 'index_repository', - description: 'Queue a repository for indexing. Supports GitHub URLs or local paths. Returns a run ID to track progress.', - params: 'repository (required), ref (optional), localPath (optional)' - }, - { - name: 'list_recent_files', - description: 'List recently indexed files ordered by indexing timestamp. Useful for seeing what code is available.', - params: 'limit (optional)' - }, - { - name: 'search_dependencies', - description: 'Search the dependency graph to find files that depend on or are depended on by a target file. Useful for impact analysis.', - params: 'file_path (required), repository (optional), direction (optional), depth (optional)' - }, - { - name: 'analyze_change_impact', - description: 'Analyze the impact of proposed code changes by examining dependency graphs, test scope, and potential conflicts. Returns comprehensive analysis including affected files, test recommendations, architectural warnings, and risk assessment. Useful for planning implementations and avoiding breaking changes.', - params: 'files_to_modify (optional), files_to_create (optional), files_to_delete (optional), change_type (required), description (required), breaking_changes (optional), repository (optional)' - }, - { - name: 'validate_implementation_spec', - description: 'Validate an implementation specification against KotaDB conventions and repository state. Checks for file conflicts, naming conventions, path alias usage, test coverage, and dependency compatibility. Returns validation errors, warnings, and approval conditions checklist.', - params: 'feature_name (required), files_to_create (optional), files_to_modify (optional), migrations (optional), dependencies_to_add (optional), breaking_changes (optional), repository (optional)' - } -] - -export default function ToolReference() { - return ( -
-

- Available MCP Tools -

-

- Once configured, you can use these tools in Claude Code to interact with KotaDB: -

-
- {tools.map((tool) => ( -
-
-
- - - -
-
-

- {tool.name} -

-

- {tool.description} -

-

- Parameters: {tool.params} -

-
-
-
- ))} -
-
-

- Example Usage -

- - {`# In Claude Code, you can ask: -"Search for authentication functions in my codebase" -"Index this repository: https://github.com/user/repo" -"Show me recent files that were indexed" -"Find all files that depend on src/auth/middleware.ts" -"Analyze the impact of modifying auth/middleware.ts" -"Validate my implementation spec for the new feature"`} - -
-
- ) -} diff --git a/web/context/AuthContext.tsx b/web/context/AuthContext.tsx deleted file mode 100644 index 87948dff..00000000 --- a/web/context/AuthContext.tsx +++ /dev/null @@ -1,216 +0,0 @@ -'use client' - -import React, { createContext, useContext, useState, useEffect, ReactNode } from 'react' -import { createClient } from '@/lib/supabase' -import type { Session, User } from '@supabase/supabase-js' -import type { CurrentSubscriptionResponse } from '@shared/types/api' - -interface RateLimitInfo { - limit: number - remaining: number - reset: number -} - -type Subscription = NonNullable - -interface AuthContextType { - session: Session | null - user: User | null - subscription: Subscription | null - isLoading: boolean - apiKey: string | null - setApiKey: (key: string | null) => void - rateLimitInfo: RateLimitInfo | null - updateRateLimitInfo: (headers: Headers) => void - isAuthenticated: boolean - signOut: () => Promise - refreshSubscription: () => Promise - refreshApiKey: () => Promise - revokeApiKey: () => Promise - resetApiKey: (newKey: string) => void -} - -const AuthContext = createContext(undefined) - -export function AuthProvider({ children }: { children: ReactNode }) { - const [session, setSession] = useState(null) - const [user, setUser] = useState(null) - const [subscription, setSubscription] = useState(null) - const [isLoading, setIsLoading] = useState(true) - const [apiKey, setApiKeyState] = useState(null) - const [rateLimitInfo, setRateLimitInfo] = useState(null) - const supabase = createClient() - - // Fetch subscription data from backend - const fetchSubscription = async (userSession: Session) => { - try { - const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3000' - const response = await fetch(`${apiUrl}/api/subscriptions/current`, { - headers: { - Authorization: `Bearer ${userSession.access_token}`, - }, - }) - - if (response.ok) { - const data: CurrentSubscriptionResponse = await response.json() - setSubscription(data.subscription) - } else { - setSubscription(null) - } - } catch (error) { - process.stderr.write(`Error fetching subscription: ${error instanceof Error ? error.message : String(error)}\n`) - setSubscription(null) - } - } - - // Validate API key against backend - const validateApiKey = async (key: string): Promise => { - try { - const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3000' - const response = await fetch(`${apiUrl}/api/keys/validate`, { - headers: { - 'Authorization': `Bearer ${key}`, - }, - }) - return response.ok - } catch (error) { - process.stderr.write(`[Auth] API key validation error: ${error instanceof Error ? error.message : String(error)}\n`) - return false - } - } - - // Load API key from localStorage on mount and validate it - useEffect(() => { - const stored = localStorage.getItem('kotadb_api_key') - if (stored) { - validateApiKey(stored).then(valid => { - if (valid) { - setApiKeyState(stored) - } else { - localStorage.removeItem('kotadb_api_key') - process.stderr.write('[Auth] Removed invalid API key from localStorage\n') - } - }) - } - }, []) - - // Initialize session and subscribe to auth changes - useEffect(() => { - supabase.auth.getSession().then(({ data: { session } }: { data: { session: Session | null } }) => { - setSession(session) - setUser(session?.user ?? 
null) - if (session) { - fetchSubscription(session) - } - setIsLoading(false) - }) - - const { - data: { subscription: authSubscription }, - } = supabase.auth.onAuthStateChange((_event: string, currentSession: Session | null) => { - setSession(currentSession) - setUser(currentSession?.user ?? null) - if (currentSession) { - fetchSubscription(currentSession) - } else { - setSubscription(null) - } - }) - - return () => authSubscription.unsubscribe() - }, [supabase.auth]) - - const setApiKey = (key: string | null) => { - if (key) { - localStorage.setItem('kotadb_api_key', key) - } else { - localStorage.removeItem('kotadb_api_key') - } - setApiKeyState(key) - } - - const updateRateLimitInfo = (headers: Headers) => { - const limit = headers.get('X-RateLimit-Limit') - const remaining = headers.get('X-RateLimit-Remaining') - const reset = headers.get('X-RateLimit-Reset') - - if (limit && remaining && reset) { - setRateLimitInfo({ - limit: parseInt(limit, 10), - remaining: parseInt(remaining, 10), - reset: parseInt(reset, 10), - }) - } - } - - const signOut = async () => { - await supabase.auth.signOut() - setApiKey(null) - setRateLimitInfo(null) - } - - const refreshSubscription = async () => { - if (session) { - await fetchSubscription(session) - } - } - - const refreshApiKey = async () => { - if (session) { - try { - const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3000' - const response = await fetch(`${apiUrl}/api/keys/current`, { - headers: { - Authorization: `Bearer ${session.access_token}`, - }, - }) - - if (response.ok) { - const data = await response.json() - // Key metadata refresh successful (no secret returned) - } - } catch (error) { - // Error refreshing API key metadata - } - } - } - - const revokeApiKey = async () => { - setApiKey(null) - } - - const resetApiKey = (newKey: string) => { - setApiKey(newKey) - } - - return ( - - {children} - - ) -} - -export function useAuth() { - const context = useContext(AuthContext) - if (context === undefined) { - throw new Error('useAuth must be used within an AuthProvider') - } - return context -} diff --git a/web/docs/STYLING_GUIDE.md b/web/docs/STYLING_GUIDE.md deleted file mode 100644 index 5bb63739..00000000 --- a/web/docs/STYLING_GUIDE.md +++ /dev/null @@ -1,332 +0,0 @@ -# Liquid Glass Design System - Styling Guide - -## Overview - -This guide documents the Liquid Glass (glassmorphism) design system implementation for the KotaDB web application. The design uses frosted/translucent glass surfaces with backdrop blur effects to create visual depth while maintaining accessibility standards. 
- -## Design Tokens - -### CSS Custom Properties - -All glass effects are powered by CSS custom properties defined in `app/globals.css`: - -```css -/* Blur levels */ ---glass-blur-sm: 4px; ---glass-blur-md: 10px; ---glass-blur-lg: 16px; - -/* Opacity scales */ ---glass-opacity-light: 0.7; ---glass-opacity-dark: 0.5; - -/* Border colors */ ---glass-border-light: rgba(255, 255, 255, 0.18); ---glass-border-dark: rgba(255, 255, 255, 0.12); - -/* Background colors */ ---glass-bg-light: rgba(255, 255, 255, 0.7); ---glass-bg-dark: rgba(255, 255, 255, 0.05); -``` - -### Tailwind Extensions - -Custom backdrop-blur scales in `tailwind.config.ts`: - -```typescript -backdropBlur: { - xs: '2px', - sm: '4px', - md: '10px', - lg: '16px', - xl: '24px', -} -``` - -## Glass Utility Classes - -### Primary Glass Classes - -**`.glass-light`** - Light mode glass effect -- Use for primary surfaces in light mode -- Automatically switches to dark variant with `dark:` prefix -- Includes backdrop-filter blur and saturation boost - -**`.glass-dark`** - Dark mode glass effect -- Use for enhanced depth in dark mode -- Lower opacity for subtle layering -- Maintains readability in dark color schemes - -**`.glass-modal`** - High-blur modal overlay -- Use for modal dialogs and overlays -- Increased blur intensity (16px) for stronger separation -- Enhanced focus on modal content - -### Usage Examples - -#### Navigation Bar (Sticky Glass) -```tsx - -``` - -#### Card Containers -```tsx -
<div className="glass-light dark:glass-dark rounded-lg p-6">
  {/* Card content */}
</div>
```

#### Input Fields
```tsx
<input
  type="text"
  className="px-3 py-2 rounded-md text-sm glass-light dark:glass-dark text-gray-900 dark:text-gray-100 placeholder:text-gray-600 dark:placeholder:text-gray-400 focus:outline-none focus:ring-2 focus:ring-blue-500 transition-all"
/>
```

#### Badge/Pill Components
```tsx
{/* classes representative */}
<span className="px-2 py-1 rounded-full text-xs font-semibold glass-light dark:glass-dark">
  SOLO
</span>
```

#### List Items with Hover Effects
```tsx
{/* hover classes representative */}
<div className="glass-light dark:glass-dark rounded-lg p-4 transition-all hover:bg-white/80 dark:hover:bg-gray-800/50">
  {/* List item content */}
</div>
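
{/* Related: the third utility, .glass-modal, in context for overlays.
    A sketch only (the wrapper markup is illustrative, not copied from a component): */}
<div className="fixed inset-0 z-50 flex items-center justify-center p-4">
  <div className="glass-modal rounded-lg p-6 max-w-md w-full">
    {/* Modal content */}
  </div>
</div>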
-``` - -## Accessibility - -### Contrast Requirements - -All text on glass surfaces meets WCAG 2.1 AA standards: -- **Body text**: Contrast ratio ≥4.5:1 -- **Large text** (18pt+): Contrast ratio ≥3:1 - -### Contrast Validation - -Text colors used on glass backgrounds: -- Light mode body text: `text-gray-900` (contrast: 5.2:1) -- Dark mode body text: `text-gray-100` (contrast: 4.8:1) -- Placeholder text: `text-gray-600` / `dark:text-gray-400` (contrast: 4.5:1) - -### Reduced Transparency Support - -The design system respects user accessibility preferences: - -```css -@media (prefers-reduced-transparency: reduce) { - .glass-light, - .glass-dark, - .glass-modal { - backdrop-filter: none; - -webkit-backdrop-filter: none; - /* Falls back to solid backgrounds with 95% opacity */ - } -} -``` - -**Testing**: Enable "Reduce transparency" in macOS System Preferences → Accessibility → Display to verify fallback behavior. - -## Browser Compatibility - -### Supported Browsers - -- Chrome 76+ (full support) -- Firefox 103+ (full support) -- Safari 9+ (requires `-webkit-` prefix, automatically included) -- Edge 79+ (full support) - -### Fallback for Unsupported Browsers - -The design gracefully degrades for browsers without `backdrop-filter` support: - -```css -@supports not (backdrop-filter: blur(10px)) { - .glass-light, - .glass-dark, - .glass-modal { - background: rgba(255, 255, 255, 0.95); - } -} -``` - -## Performance Considerations - -### Best Practices - -1. **Limit Layers**: Use glass effects on 3-4 layers maximum to prevent GPU overdraw -2. **Mobile Optimization**: Consider reducing blur intensity on mobile breakpoints: - ```tsx - className="backdrop-blur-sm md:backdrop-blur-md" - ``` -3. **Avoid Nested Glass**: Don't stack multiple glass containers inside each other -4. **Test on Low-End Devices**: Verify performance on iPhone SE and mid-range Android devices - -### Performance Monitoring - -Run Lighthouse audits regularly: -```bash -cd web && bun run build -# Open production build in browser -# DevTools → Lighthouse → Run audit -``` - -**Target Scores**: -- Performance: ≥90 -- Accessibility: ≥90 -- Best Practices: ≥90 - -## Component Patterns - -### Profile/Dashboard Cards - -```tsx -
{/* heading classes representative */}
<div className="glass-light dark:glass-dark rounded-lg border border-gray-200/50 dark:border-gray-800/50 p-6">
  <h2 className="text-xl font-semibold text-gray-900 dark:text-gray-100 mb-4">
    Section Title
  </h2>
  <div>
    {/* Card content */}
  </div>
</div>
-``` - -### Search Results - -```tsx -
{/* classes representative */}
<div className="glass-light dark:glass-dark rounded-lg p-4 transition-all hover:bg-white/80 dark:hover:bg-gray-800/50">
  <h3 className="text-sm font-medium text-gray-900 dark:text-gray-100">
    {filename}
  </h3>
  <pre className="mt-2 text-xs overflow-x-auto">
    {snippet}
  </pre>
</div>
-``` - -### Alert/Notice Boxes - -```tsx -{/* Warning */} -
{/* classes representative */}
<div className="glass-light dark:glass-dark border-l-4 border-yellow-500 rounded-lg p-4">
  <p className="text-sm text-yellow-800 dark:text-yellow-200">Warning message</p>
</div>

{/* Error */}
<div className="glass-light dark:glass-dark border-l-4 border-red-500 rounded-lg p-4">
  <p className="text-sm text-red-800 dark:text-red-200">Error message</p>
</div>

{/* Success */}
<div className="glass-light dark:glass-dark border-l-4 border-green-500 rounded-lg p-4">
  <p className="text-sm text-green-800 dark:text-green-200">Success message</p>
</div>
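
{/* The three boxes differ only by accent color. A small helper can keep the
    glass classes in one place (a sketch: the `tone` prop and color map are
    illustrative, not from the codebase): */}
function GlassNotice({ tone, children }: { tone: 'warning' | 'error' | 'success'; children: React.ReactNode }) {
  const accent = {
    warning: 'border-yellow-500 text-yellow-800 dark:text-yellow-200',
    error: 'border-red-500 text-red-800 dark:text-red-200',
    success: 'border-green-500 text-green-800 dark:text-green-200',
  }[tone]
  return (
    <div className={`glass-light dark:glass-dark border-l-4 rounded-lg p-4 ${accent}`}>
      <p className="text-sm">{children}</p>
    </div>
  )
}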
-``` - -## Troubleshooting - -### Issue: Text Readability on Glass Surfaces - -**Solution**: If contrast falls below 4.5:1, adjust glass opacity or add text shadow: -```tsx -className="text-gray-900 dark:text-gray-100 shadow-sm" -``` - -Or increase background opacity in CSS variables: -```css ---glass-bg-light: rgba(255, 255, 255, 0.8); /* increased from 0.7 */ -``` - -### Issue: Glass Effect Not Visible - -**Check**: -1. Ensure parent container has background content (glass needs content behind it to blur) -2. Verify `-webkit-` prefix is included for Safari -3. Check if browser supports `backdrop-filter` (use DevTools console): - ```javascript - CSS.supports('backdrop-filter', 'blur(10px)') - ``` - -### Issue: Performance Lag on Mobile - -**Solution**: Reduce blur intensity on mobile breakpoints: -```tsx -className="backdrop-blur-sm md:backdrop-blur-md lg:backdrop-blur-lg" -``` - -Or disable glass effects entirely on mobile: -```tsx -className="bg-white dark:bg-gray-800 md:glass-light md:dark:glass-dark" -``` - -## Migration from Solid Backgrounds - -### Step-by-Step Conversion - -1. **Identify target component** (card, modal, badge) -2. **Replace solid background classes**: - - Remove: `bg-white dark:bg-gray-800` - - Add: `glass-light dark:glass-dark` -3. **Update border styling**: - - Remove: `border border-gray-200 dark:border-gray-800` - - Add: `border-gray-200/50 dark:border-gray-800/50` (lower opacity) -4. **Validate text contrast** using WebAIM Contrast Checker -5. **Test hover states** and transitions -6. **Verify accessibility** with axe DevTools - -### Before/After Example - -**Before**: -```tsx -
<div className="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-800 rounded-lg p-6">
  <p className="text-gray-900 dark:text-gray-100">Content</p>
</div>
```

**After**:
```tsx
<div className="glass-light dark:glass-dark border border-gray-200/50 dark:border-gray-800/50 rounded-lg p-6">
  <p className="text-gray-900 dark:text-gray-100">Content</p>
</div>
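
{/* Step 5 of the conversion (hover states): interactive cards keep their
    transitions; the hover classes here are illustrative: */}
<div className="glass-light dark:glass-dark border border-gray-200/50 dark:border-gray-800/50 rounded-lg p-6 transition-all hover:bg-white/80 dark:hover:bg-gray-800/50">
  <p className="text-gray-900 dark:text-gray-100">Content</p>
</div>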
-``` - -## Future Enhancements - -### Planned Improvements - -1. **Animated Glass Refraction** (WebGL or Canvas-based) - - Dynamic light refraction on hover - - Performance-conscious implementation - -2. **High Contrast Mode Toggle** - - User preference for disabling glass effects - - Stored in localStorage - -3. **Custom Glass Intensities** - - Utility classes for varying blur levels - - `.glass-subtle`, `.glass-intense` - -4. **Dark Mode Auto-Detection** - - Respect system preferences - - Smooth transitions between modes - -## References - -- [Apple Human Interface Guidelines - Materials](https://developer.apple.com/design/human-interface-guidelines/materials) -- [MDN: backdrop-filter](https://developer.mozilla.org/en-US/docs/Web/CSS/backdrop-filter) -- [WCAG 2.1 Contrast Requirements](https://www.w3.org/WAI/WCAG21/Understanding/contrast-minimum.html) -- [WebAIM Contrast Checker](https://webaim.org/resources/contrastchecker/) -- [Can I Use: backdrop-filter](https://caniuse.com/css-backdrop-filter) - -## Support - -For questions or issues related to the glass design system: -- Create an issue in the GitHub repository -- Tag with `component:web` and `design-system` labels -- Reference this guide in your issue description diff --git a/web/docs/specs/bug-450-manage-billing-button-non-functional.md b/web/docs/specs/bug-450-manage-billing-button-non-functional.md deleted file mode 100644 index 19e1a9e7..00000000 --- a/web/docs/specs/bug-450-manage-billing-button-non-functional.md +++ /dev/null @@ -1,385 +0,0 @@ -# Bug Plan: Manage Billing Button Non-Functional - -## Bug Summary -- **Observed Behaviour**: "Manage Billing" button on dashboard (develop.kotadb.io) is non-functional when clicked. No navigation occurs, no visible error message to user. Issue affects all browsers (Safari, Chrome) on both mobile and desktop platforms. -- **Expected Behaviour**: User clicks "Manage Billing" → Frontend calls POST `/api/subscriptions/create-portal-session` with JWT auth → Backend returns Stripe billing portal URL → Browser redirects to `billing.stripe.com` -- **Suspected Scope**: - 1. Missing authentication header in frontend fetch request (primary suspect) - 2. Silent error handling prevents user from seeing failure reason - 3. Backend endpoint may be rejecting unauthenticated requests with 401 - -## Root Cause Hypothesis - -**Primary Root Cause: Missing JWT Authentication Header** - -The billing portal endpoint at `app/src/api/routes.ts:785` requires authentication via the global middleware (line 434-472). The middleware enforces either: -- API key in `X-API-Key` header (programmatic access) -- JWT bearer token in `Authorization: Bearer ` header (OAuth web users) - -The `handleManageBilling` function at `web/app/dashboard/page.tsx:40-65` calls the endpoint **without authentication headers**: - -```typescript -const response = await fetch(`${apiUrl}/api/subscriptions/create-portal-session`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - // ❌ MISSING: 'Authorization': `Bearer ${session.access_token}` - }, - body: JSON.stringify({ returnUrl: window.location.href }), -}) -``` - -**Supporting Evidence:** -- Same authentication pattern bug was previously fixed in #320 for checkout session endpoint -- `AuthContext.tsx:48-52` demonstrates correct pattern (used in `fetchSubscription`) -- Dashboard component uses `useAuth()` hook (line 20) but doesn't access `session` property -- Backend logs would show 401 errors if monitoring were enabled - -**Secondary Issues:** - -1. 
**Silent Error Handling**: Lines 58-63 only log to `process.stderr.write()` without displaying user-facing error messages. User has no feedback when request fails. - -2. **No Session Validation**: Function doesn't verify `session` exists before attempting request, risking undefined access errors. - -## Fix Strategy - -### Code Changes - -1. **Frontend Authentication (Primary Fix)**: - - Modify `web/app/dashboard/page.tsx:40-65` to include Authorization header - - Extract `session` from `useAuth()` hook (line 20) - - Follow pattern from `AuthContext.tsx:48-52` for authenticated requests - - Add defensive session check before making request - -2. **Frontend Error Handling**: - - Add user-facing error state display - - Parse error response and show specific messages (401, 404, 500) - - Handle edge cases: missing session, network failure, Stripe config errors - - Consider toast notification or inline error display - -3. **Logging Enhancement** (Backend - Optional): - - Add structured logging for portal session creation failures - - Log customer ID lookup failures separately from Stripe API errors - - Aid in debugging production issues - -### Guardrails - -1. Add integration test for billing portal flow hitting real Supabase (per /anti-mock) -2. Add frontend validation to verify session exists before request -3. Add user feedback mechanism for all error cases -4. Test across browsers mentioned in issue (Safari, Chrome, mobile & desktop) - -## Relevant Files - -### Modified Files -- `web/app/dashboard/page.tsx` — Add Authorization header to billing portal request (lines 40-65), add error state UI -- `web/app/dashboard/page.tsx` — Extract session from useAuth hook (line 20) - -### Files for Reference (No Changes) -- `app/src/api/routes.ts:785-835` — Billing portal endpoint (already correct) -- `app/src/api/routes.ts:434-472` — Authentication middleware (already correct) -- `web/context/AuthContext.tsx:48-52` — Auth pattern reference for JWT token usage -- `docs/specs/bug-320-payment-links-not-redirecting.md` — Previous fix for identical issue in checkout flow - -### New Files -- `web/tests/dashboard/manage-billing.test.ts` — E2E test for billing portal flow (Playwright) - -## Task Breakdown - -### Verification -1. **Reproduce Bug on Staging**: - - Navigate to https://develop.kotadb.io/dashboard (logged in with paid subscription) - - Locate "Manage Billing" button in Subscription section - - Open browser DevTools → Network tab - - Click "Manage Billing" button - - Observe POST request to `/api/subscriptions/create-portal-session` - - Confirm response status 401 and error: `{"error": "Missing API key"}` - - Verify no user-facing error message appears - -2. **Verify Backend Endpoint**: - - Review `app/src/api/routes.ts:785-835` to confirm endpoint requires auth - - Verify endpoint returns 404 if no subscription exists (line 803-805) - - Confirm Stripe client initialization (lines 808-819) - - Test endpoint directly with curl + JWT token (should succeed) - -3. **Review Related Bug Fix**: - - Compare with `docs/specs/bug-320-payment-links-not-redirecting.md` - - Confirm identical root cause (missing Authorization header) - - Reuse testing approach and validation strategy - -### Implementation - -1. 
**Fix Frontend Authentication**: - - Open `web/app/dashboard/page.tsx` - - Update line 20: Extract session from useAuth hook - ```typescript - const { user, subscription, apiKey, setApiKey, isLoading, session } = useAuth() - ``` - - Add session validation before request (after line 41): - ```typescript - if (!session?.access_token) { - process.stderr.write('No session available for billing portal request\n') - return - } - ``` - - Add Authorization header (line 46-48): - ```typescript - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${session.access_token}`, - } - ``` - -2. **Add User-Facing Error Handling**: - - Add error state at top of DashboardContent component: - ```typescript - const [billingError, setBillingError] = useState(null) - ``` - - Update error handling block (replace lines 58-63): - ```typescript - if (response.ok) { - const data: CreatePortalSessionResponse = await response.json() - window.location.href = data.url - } else if (response.status === 401) { - setBillingError('Authentication failed. Please refresh and try again.') - process.stderr.write('Billing portal auth failed: 401 Unauthorized\n') - } else if (response.status === 404) { - setBillingError('No subscription found. Please contact support.') - process.stderr.write('Billing portal failed: No subscription found\n') - } else { - setBillingError('Failed to open billing portal. Please try again.') - const errorData = await response.json().catch(() => ({})) - process.stderr.write(`Billing portal error: ${JSON.stringify(errorData)}\n`) - } - ``` - - Render error message in Subscription section (after line 324): - ```typescript - {billingError && ( -
    {/* red alert box; classes representative */}
    <div className="mb-4 p-3 rounded-md bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800">
      <p className="text-sm text-red-800 dark:text-red-200">{billingError}</p>
    </div>
- )} - ``` - -3. **Clear Error on Retry**: - - Add `setBillingError(null)` at start of handleManageBilling (after line 41) - -4. **Add Playwright E2E Test**: - - Create `web/tests/dashboard/manage-billing.test.ts` - - Test authenticated user with paid subscription can open billing portal - - Test user without subscription sees error - - Test unauthenticated user is redirected to login - - Use Playwright browser automation to verify redirect behavior - - Leverage dev-session endpoint for test authentication (per feature-317) - -5. **Type Safety Check**: - - Verify `CreatePortalSessionResponse` type exists in `@shared/types/api` (line 147-150) - - Confirm import at top of dashboard file (line 6) - -### Validation - -1. **Manual Testing on Staging**: - - Deploy changes to develop.kotadb.io - - Test as authenticated user with paid subscription: - - Click "Manage Billing" button - - **Expected**: Redirect to `billing.stripe.com` portal - - **Expected**: Return URL brings user back to dashboard - - Test as free tier user: - - "Manage Billing" button should NOT be visible (line 315 conditional) - - Test error cases: - - Simulate 404 by temporarily revoking subscription in DB - - Verify user sees "No subscription found" error message - -2. **Cross-Browser Testing**: - - Test on Safari (macOS and iOS per issue description) - - Test on Chrome (desktop and mobile per issue description) - - Verify button click handler fires in all browsers - - Check DevTools console for any JavaScript errors - -3. **Network Inspection**: - - Verify Authorization header present in request - - Confirm response status 200 (not 401) - - Validate response body contains `{"url": "https://billing.stripe.com/..."}` - -4. **Error Display Testing**: - - Force 401 error: Remove Authorization header temporarily - - Verify error message appears in red box - - Force 404 error: Delete subscription record - - Verify "No subscription found" message - - Force 500 error: Temporarily break Stripe config - - Verify generic error message - -## Step by Step Tasks - -### Investigation and Reproduction -1. Log into develop.kotadb.io with test account that has paid subscription -2. Open browser DevTools → Network tab -3. Click "Manage Billing" button -4. Capture failed request details (status, headers, response body) -5. Document error in issue comment for stakeholder visibility - -### Frontend Authentication Fix -1. Open `web/app/dashboard/page.tsx` -2. Update useAuth destructuring to include session (line 20) -3. Add session validation check (after line 41) -4. Add Authorization header to fetch request (lines 46-48) -5. Run `cd web && bunx tsc --noEmit` to verify type safety - -### Error Handling Implementation -1. Add billingError state variable to DashboardContent component -2. Update handleManageBilling error handling (lines 58-63) -3. Add error message rendering in Subscription section UI (after line 324) -4. Clear error on retry by adding setBillingError(null) at function start -5. Test error display with mock 401/404/500 responses - -### E2E Test Creation -1. Create `web/tests/dashboard/manage-billing.test.ts` -2. Write test: authenticated user with subscription can access billing portal -3. Write test: user without subscription sees helpful error -4. Write test: button not visible for free tier users -5. Run tests: `cd web && bun test tests/dashboard/manage-billing.test.ts` - -### Cross-Browser Validation -1. Deploy changes to staging environment -2. Test on Safari desktop (macOS) -3. 
Test on Safari mobile (iOS simulator or device) -4. Test on Chrome desktop -5. Test on Chrome mobile (Android simulator or device) -6. Document results in issue comment - -### Production Readiness -1. Run full validation suite (see Validation Commands) -2. Test with real Stripe test mode configuration -3. Verify Stripe billing portal displays correctly -4. Confirm return URL navigation works -5. Check for any console errors or warnings - -### Documentation and Cleanup -1. Add code comment explaining Authorization header requirement -2. Update issue #450 with fix verification results -3. Document cross-browser test results -4. Remove any temporary debugging code -5. Run linter and fix any style issues - -### Push and PR Creation -1. Stage changes: `git add web/app/dashboard/page.tsx web/tests/dashboard/manage-billing.test.ts` -2. Commit: `fix: add authentication to billing portal request (#450)` -3. Push branch: `git push -u origin bug/450-manage-billing-button-non-functional` -4. Create PR linking to issue #450 -5. Request review from team - -## Regression Risks - -### Adjacent Features to Watch - -1. **Other Dashboard Actions** (API key generation, reset, revoke): - - Functions: `handleGenerateApiKey` (line 67), `handleResetApiKey` (line 174), `handleRevokeApiKey` (line 209) - - All use JWT authentication correctly (extract session, set Authorization header) - - Low risk: No changes to these functions - - Verify: API key management still works after deployment - -2. **Subscription Display** (`AuthContext.fetchSubscription`): - - Already uses Authorization header correctly (AuthContext.tsx:48-52) - - Powers subscription tier badge on dashboard - - Low risk: Uses same session object we're now accessing - - Verify: Subscription details still display correctly - -3. **Checkout Flow** (Fixed in #320): - - Pricing page → Stripe Checkout session creation - - Was previously broken due to same missing auth header issue - - Low risk: Separate endpoint, already fixed - - Verify: Upgrade flow from /pricing still works - -4. **MCP Configuration Navigation** (line 374-392): - - Router.push to /mcp page (client-side navigation) - - No API calls, no authentication required - - Very low risk: Pure navigation logic - - Verify: "Configure MCP Integration" button still navigates - -### Follow-up Work if Risk Materializes - -1. **If Button Still Non-Functional**: - - Check browser console for JavaScript errors - - Verify React event handler is attached to button element - - Inspect button's disabled state logic (line 318) - - Check if CSS is preventing click events (z-index, pointer-events) - -2. **If Backend Returns Different Error**: - - May indicate environment variable issues (STRIPE_SECRET_KEY) - - Could mean Stripe customer ID is missing/invalid in database - - Follow-up: Add backend logging to distinguish error types - - Follow-up: Create admin tool to audit subscription records - -3. **If Billing Portal Redirect Fails**: - - Stripe API may be rejecting customer ID - - Return URL validation may fail (Stripe checks domain whitelist) - - Follow-up: Add Stripe webhook logging to track portal events - - Follow-up: Verify Stripe dashboard settings allow return URLs - -4. **If Error Messages Too Generic**: - - Users may need more specific guidance for resolution - - Follow-up: Add error code system (BILLING_AUTH_FAILED, BILLING_NO_SUB, etc.) 
- - Follow-up: Create help center article for common billing errors - -## Validation Commands - -```bash -# Type checking -cd web && bunx tsc --noEmit - -# Linting -cd web && bun run lint - -# E2E tests -cd web && bun test tests/dashboard/manage-billing.test.ts - -# Build verification -cd web && bun run build - -# Manual staging test checklist: -# 1. Deploy to develop.kotadb.io -# 2. Login with paid subscription test account -# 3. Navigate to /dashboard -# 4. Verify "Manage Billing" button visible -# 5. Click button -# 6. Verify redirect to billing.stripe.com -# 7. Verify return URL brings back to dashboard -# 8. Check Network tab: POST request has Authorization header -# 9. Check Network tab: Response status 200 -# 10. Check console: No JavaScript errors - -# Cross-browser validation: -# - Safari macOS: ✓ -# - Safari iOS: ✓ -# - Chrome desktop: ✓ -# - Chrome mobile: ✓ - -# Error case testing: -# - Free tier user: Button not visible ✓ -# - No session: Error displayed ✓ -# - 404 (no subscription): Error displayed ✓ -``` - -## Commit Message Validation - -All commits for this bug fix will be validated. Ensure commit messages: -- Follow Conventional Commits format: `(): ` -- Valid types: feat, fix, chore, docs, test, refactor, perf, ci, build, style -- **AVOID meta-commentary patterns**: "based on", "the commit should", "here is", "this commit", "i can see", "looking at", "the changes", "let me" -- Use direct statements: `fix: add authentication to billing portal request` not `This commit adds authentication to the billing request` - -Example good commit messages: -``` -fix(dashboard): add JWT authentication to billing portal request -test(dashboard): add E2E tests for manage billing button -fix(dashboard): display user-facing errors for billing portal failures -docs(bug-450): document cross-browser test results -``` - -Example bad commit messages (do NOT use): -``` -fix: based on issue #450, this should fix the billing button -fix: looking at the dashboard, I can see the auth header is missing -fix: here is the fix for the manage billing bug -fix: this commit adds the Authorization header to the request -``` diff --git a/web/lib/api-client.ts b/web/lib/api-client.ts deleted file mode 100644 index 5acd8951..00000000 --- a/web/lib/api-client.ts +++ /dev/null @@ -1,250 +0,0 @@ -import type { - IndexRequest, - IndexResponse, - SearchRequest, - SearchResponse, - RecentFilesResponse, - HealthResponse, - JobStatusResponse, -} from '@shared/types/api' - -const API_BASE_URL = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3000' - -class ApiError extends Error { - constructor( - message: string, - public status: number, - public statusText: string, - public errorBody?: unknown, - ) { - super(message) - this.name = 'ApiError' - } -} - -interface FetchOptions { - apiKey?: string - signal?: AbortSignal - timeout?: number - skipRetry?: boolean -} - -const DEFAULT_TIMEOUT = 30000 // 30 seconds -const MAX_RETRIES = 3 -const RETRY_DELAYS = [1000, 2000, 4000] // 1s, 2s, 4s - -async function sleep(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)) -} - -async function fetchApi( - endpoint: string, - options: RequestInit & FetchOptions = {}, -): Promise<{ data: T; headers: Headers }> { - const { apiKey, timeout = DEFAULT_TIMEOUT, skipRetry = false, ...fetchOptions } = options - - const headers: Record = { - 'Accept': 'application/json', - } - - // Only add Content-Type for non-GET requests - if (fetchOptions.method && fetchOptions.method !== 'GET') { - 
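    // A JSON Content-Type on a GET would also force a CORS preflight for no benefit.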
headers['Content-Type'] = 'application/json' - } - - if (fetchOptions.headers) { - Object.assign(headers, fetchOptions.headers) - } - - if (apiKey) { - headers['Authorization'] = `Bearer ${apiKey}` - } - - const url = `${API_BASE_URL}${endpoint}` - - // Dev-mode logging - if (process.env.NODE_ENV === 'development') { - process.stdout.write( - `[API] ${fetchOptions.method || 'GET'} ${endpoint}\n` - ) - } - - let lastError: Error | null = null - const maxAttempts = skipRetry ? 1 : MAX_RETRIES - - for (let attempt = 0; attempt < maxAttempts; attempt++) { - try { - // Create abort controller for timeout - const controller = new AbortController() - const timeoutId = setTimeout(() => controller.abort(), timeout) - - const response = await fetch(url, { - ...fetchOptions, - headers, - signal: controller.signal, - }) - - clearTimeout(timeoutId) - - // Dev-mode logging - if (process.env.NODE_ENV === 'development') { - process.stdout.write( - `[API] ${response.status} ${endpoint}\n` - ) - } - - if (!response.ok) { - let errorBody: unknown = null - let errorMessage = response.statusText - - // Try to parse error response body - try { - errorBody = await response.json() - if (errorBody && typeof errorBody === 'object' && 'error' in errorBody) { - errorMessage = String(errorBody.error) - } - } catch { - // Fallback to statusText if JSON parsing fails - errorMessage = response.statusText || 'Request failed' - } - - const apiError = new ApiError( - errorMessage, - response.status, - response.statusText, - errorBody, - ) - - // Retry only on 5xx errors (not 4xx client errors) - if (response.status >= 500 && response.status < 600 && attempt < maxAttempts - 1) { - lastError = apiError - const delay = RETRY_DELAYS[attempt] - if (process.env.NODE_ENV === 'development') { - process.stderr.write( - `[API] Retry ${attempt + 1}/${MAX_RETRIES} after ${delay}ms (${response.status} ${endpoint})\n` - ) - } - await sleep(delay) - continue - } - - throw apiError - } - - const data = await response.json() - return { data, headers: response.headers } - } catch (error) { - if (error instanceof ApiError) { - throw error - } - - // Handle timeout and network errors - lastError = error instanceof Error ? 
error : new Error('Unknown error')
-
-      if (attempt < maxAttempts - 1) {
-        const delay = RETRY_DELAYS[attempt]
-        if (process.env.NODE_ENV === 'development') {
-          process.stderr.write(
-            `[API] Retry ${attempt + 1}/${MAX_RETRIES} after ${delay}ms (network error on ${endpoint})\n`
-          )
-        }
-        await sleep(delay)
-        continue
-      }
-
-      throw new ApiError(
-        lastError.message || 'Network request failed',
-        0,
-        'Network Error',
-      )
-    }
-  }
-
-  // Should never reach here, but TypeScript requires it
-  throw lastError || new Error('Request failed')
-}
-
-export const apiClient = {
-  /**
-   * Check API health status
-   */
-  async health(): Promise<HealthResponse> {
-    const { data } = await fetchApi<HealthResponse>('/health')
-    return data
-  },
-
-  /**
-   * Search indexed code
-   */
-  async search(
-    request: SearchRequest,
-    apiKey?: string,
-  ): Promise<{ response: SearchResponse; headers: Headers }> {
-    const queryParams = new URLSearchParams()
-    queryParams.set('term', request.term)
-
-    if (request.repository) {
-      queryParams.set('repository', request.repository)
-    }
-
-    if (request.limit) {
-      queryParams.set('limit', request.limit.toString())
-    }
-
-    const { data, headers } = await fetchApi<SearchResponse>(
-      `/search?${queryParams.toString()}`,
-      { apiKey },
-    )
-
-    return { response: data, headers }
-  },
-
-  /**
-   * Index a repository
-   */
-  async index(
-    request: IndexRequest,
-    apiKey?: string,
-  ): Promise<{ response: IndexResponse; headers: Headers }> {
-    const { data, headers } = await fetchApi<IndexResponse>('/index', {
-      method: 'POST',
-      body: JSON.stringify(request),
-      apiKey,
-    })
-
-    return { response: data, headers }
-  },
-
-  /**
-   * Get recently indexed files
-   */
-  async recentFiles(
-    limit: number = 20,
-    apiKey?: string,
-  ): Promise<{ response: RecentFilesResponse; headers: Headers }> {
-    const { data, headers } = await fetchApi<RecentFilesResponse>(
-      `/files/recent?limit=${limit}`,
-      { apiKey },
-    )
-
-    return { response: data, headers }
-  },
-
-  /**
-   * Get index job status by job ID
-   */
-  async getJobStatus(
-    jobId: string,
-    apiKey?: string,
-  ): Promise<{ response: JobStatusResponse; headers: Headers }> {
-    const { data, headers } = await fetchApi<JobStatusResponse>(
-      `/jobs/${jobId}`,
-      { apiKey },
-    )
-
-    return { response: data, headers }
-  },
-}
-
-export { ApiError }
-export type { FetchOptions }
diff --git a/web/lib/playwright-helpers.ts b/web/lib/playwright-helpers.ts
deleted file mode 100644
index 06306149..00000000
--- a/web/lib/playwright-helpers.ts
+++ /dev/null
@@ -1,255 +0,0 @@
-import type { Page, BrowserContext } from '@playwright/test'
-
-/**
- * Playwright Helper Utilities for Dev Session Authentication
- *
- * These utilities enable automated cookie injection for Playwright tests,
- * allowing agents to authenticate without completing GitHub OAuth flow.
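- *
- * Typical flow: POST to /auth/dev-session for session tokens, then inject them
- * as Supabase SSR cookies (see the @example blocks below) before navigating to
- * protected routes.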
- */
-
-/**
- * Session data structure returned from /auth/dev-session endpoint
- */
-export interface DevSession {
-  access_token: string
-  refresh_token: string
-  expires_in: number
-  expires_at: number
-}
-
-/**
- * Cookie structure for Playwright
- */
-interface PlaywrightCookie {
-  name: string
-  value: string
-  domain: string
-  path: string
-  expires?: number
-  httpOnly: boolean
-  secure: boolean
-  sameSite: 'Strict' | 'Lax' | 'None'
-}
-
-/**
- * Generate Playwright-compatible cookies for Supabase SSR authentication
- *
- * Cookie name pattern: sb-{project-ref}-auth-token
- * - Supabase Local: sb-localhost-auth-token
- * - Production: sb-{16-char-project-ref}-auth-token
- *
- * @param session - Session data from /auth/dev-session endpoint
- * @param projectRef - Supabase project reference (default: 'localhost' for local dev)
- * @param domain - Cookie domain (default: 'localhost')
- * @param secure - Use secure flag (default: false for local dev)
- * @returns Array of Playwright cookie objects
- */
-export function generatePlaywrightCookies(
-  session: DevSession,
-  projectRef: string = 'localhost',
-  domain: string = 'localhost',
-  secure: boolean = false
-): PlaywrightCookie[] {
-  const cookieName = `sb-${projectRef}-auth-token`
-
-  // Supabase SSR cookie value format (JSON string)
-  const cookieValue = JSON.stringify({
-    access_token: session.access_token,
-    refresh_token: session.refresh_token,
-    expires_in: session.expires_in,
-    expires_at: session.expires_at,
-    token_type: 'bearer'
-  })
-
-  return [
-    {
-      name: cookieName,
-      value: cookieValue,
-      domain,
-      path: '/',
-      expires: session.expires_at,
-      httpOnly: false, // Required for SSR client access
-      secure,
-      sameSite: 'Lax'
-    }
-  ]
-}
-
-/**
- * Inject session cookies into Playwright page context
- *
- * This helper automatically detects the Supabase project ref from environment
- * variables and injects the appropriate cookies for authentication.
- *
- * @param page - Playwright page object
- * @param session - Session data from /auth/dev-session endpoint
- * @param options - Optional configuration for cookie generation
- * @returns Promise that resolves when cookies are injected
- *
- * @example
- * ```typescript
- * // Fetch session from dev endpoint
- * const response = await fetch('http://localhost:3001/auth/dev-session', {
- *   method: 'POST',
- *   headers: { 'Content-Type': 'application/json' },
- *   body: JSON.stringify({ email: 'test@example.com' })
- * })
- * const { session } = await response.json()
- *
- * // Inject cookies into Playwright context
- * await injectSessionCookies(page, session)
- *
- * // Navigate to protected route
- * await page.goto('http://localhost:3001/dashboard')
- * ```
- */
-export async function injectSessionCookies(
-  page: Page,
-  session: DevSession,
-  options?: {
-    projectRef?: string
-    domain?: string
-    secure?: boolean
-  }
-): Promise<void> {
-  const projectRef = options?.projectRef ||
-    extractProjectRefFromUrl(process.env.NEXT_PUBLIC_SUPABASE_URL) ||
-    'localhost'
-
-  const domain = options?.domain || 'localhost'
-  const secure = options?.secure || false
-
-  const cookies = generatePlaywrightCookies(session, projectRef, domain, secure)
-
-  await page.context().addCookies(cookies)
-}
-
-/**
- * Inject session cookies into Playwright browser context
- *
- * Alternative to injectSessionCookies that operates at the context level
- * instead of page level. Useful for setting cookies before navigating.
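- * (for example, when several pages opened from the same context should share one login).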
- *
- * @param context - Playwright browser context
- * @param session - Session data from /auth/dev-session endpoint
- * @param options - Optional configuration for cookie generation
- * @returns Promise that resolves when cookies are injected
- */
-export async function injectSessionCookiesIntoContext(
-  context: BrowserContext,
-  session: DevSession,
-  options?: {
-    projectRef?: string
-    domain?: string
-    secure?: boolean
-  }
-): Promise<void> {
-  const projectRef = options?.projectRef ||
-    extractProjectRefFromUrl(process.env.NEXT_PUBLIC_SUPABASE_URL) ||
-    'localhost'
-
-  const domain = options?.domain || 'localhost'
-  const secure = options?.secure || false
-
-  const cookies = generatePlaywrightCookies(session, projectRef, domain, secure)
-
-  await context.addCookies(cookies)
-}
-
-/**
- * Extract Supabase project reference from Supabase URL
- *
- * Examples:
- * - http://localhost:54326 → 'localhost'
- * - https://abcdefghijklmnop.supabase.co → 'abcdefghijklmnop'
- *
- * @param supabaseUrl - Supabase URL from environment variable
- * @returns Project reference string or null if extraction fails
- */
-function extractProjectRefFromUrl(supabaseUrl?: string): string | null {
-  if (!supabaseUrl) return null
-
-  try {
-    const url = new URL(supabaseUrl)
-
-    // Local development (localhost or 127.0.0.1)
-    if (url.hostname === 'localhost' || url.hostname === '127.0.0.1') {
-      return 'localhost'
-    }
-
-    // Production Supabase URL pattern: https://{project-ref}.supabase.co
-    const supabasePattern = /^([a-z0-9]+)\.supabase\.co$/
-    const match = url.hostname.match(supabasePattern)
-
-    if (match) {
-      return match[1]
-    }
-
-    return null
-  } catch {
-    return null
-  }
-}
-
-/**
- * Create authenticated session via dev endpoint and inject into Playwright
- *
- * Convenience function that combines session creation and cookie injection
- * into a single call. This is the recommended way to set up authenticated
- * Playwright tests.
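- *
- * When the backend API is reachable, the returned object also includes the
- * generated API key (the optional apiKey field in the return type below).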
- * - * @param page - Playwright page object - * @param email - Email for test user - * @param tier - Subscription tier (default: 'free') - * @param devEndpointUrl - URL of dev-session endpoint (default: http://localhost:3001) - * @returns Promise resolving to session data including API key - * - * @example - * ```typescript - * // Create session and inject cookies in one call - * const { session, apiKey } = await createAuthenticatedSession( - * page, - * 'test@example.com', - * 'free' - * ) - * - * // Navigate to protected route (already authenticated) - * await page.goto('http://localhost:3001/dashboard') - * - * // Use API key for backend requests - * const response = await fetch('http://localhost:3000/api/subscriptions/current', { - * headers: { 'Authorization': `Bearer ${apiKey}` } - * }) - * ``` - */ -export async function createAuthenticatedSession( - page: Page, - email: string, - tier: 'free' | 'solo' | 'team' = 'free', - devEndpointUrl: string = 'http://localhost:3001/auth/dev-session' -): Promise<{ - userId: string - email: string - session: DevSession - apiKey?: string - message: string -}> { - // Call dev-session endpoint - const response = await fetch(devEndpointUrl, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ email, tier }) - }) - - if (!response.ok) { - const errorText = await response.text() - throw new Error(`Failed to create dev session: ${response.status} ${errorText}`) - } - - const data = await response.json() - - // Inject session cookies into page context - await injectSessionCookies(page, data.session) - - return data -} diff --git a/web/lib/supabase-server.ts b/web/lib/supabase-server.ts deleted file mode 100644 index 2d5d5d4d..00000000 --- a/web/lib/supabase-server.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { createServerClient } from '@supabase/ssr' -import { cookies } from 'next/headers' - -export function createClient() { - const cookieStore = cookies() - - return createServerClient( - process.env.NEXT_PUBLIC_SUPABASE_URL!, - process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!, - { - cookies: { - getAll() { - return cookieStore.getAll() - }, - setAll(cookiesToSet) { - try { - cookiesToSet.forEach(({ name, value, options }) => - cookieStore.set(name, value, options) - ) - } catch { - // The `setAll` method was called from a Server Component. - // This can be ignored if you have middleware refreshing - // user sessions. - } - }, - }, - } - ) -} diff --git a/web/lib/supabase.ts b/web/lib/supabase.ts deleted file mode 100644 index 792b4570..00000000 --- a/web/lib/supabase.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { createBrowserClient } from '@supabase/ssr' - -export function createClient() { - return createBrowserClient( - process.env.NEXT_PUBLIC_SUPABASE_URL!, - process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY! 
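-    // NEXT_PUBLIC_* values are inlined into the browser bundle by Next.js; the
-    // Supabase anon key is designed to be public, so exposing it here is safe.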
- ) -} diff --git a/web/middleware.ts b/web/middleware.ts deleted file mode 100644 index 1efe4ba2..00000000 --- a/web/middleware.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { createServerClient } from '@supabase/ssr' -import { NextResponse, type NextRequest } from 'next/server' - -export async function middleware(request: NextRequest) { - let supabaseResponse = NextResponse.next({ - request, - }) - - const supabase = createServerClient( - process.env.NEXT_PUBLIC_SUPABASE_URL!, - process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!, - { - cookies: { - getAll() { - return request.cookies.getAll() - }, - setAll(cookiesToSet) { - cookiesToSet.forEach(({ name, value, options }) => request.cookies.set(name, value)) - supabaseResponse = NextResponse.next({ - request, - }) - cookiesToSet.forEach(({ name, value, options }) => - supabaseResponse.cookies.set(name, value, options) - ) - }, - }, - } - ) - - // IMPORTANT: Avoid writing any logic between createServerClient and - // supabase.auth.getUser(). A simple mistake could make it very hard to debug - // issues with users being randomly logged out. - - const { - data: { user }, - } = await supabase.auth.getUser() - - // Only dashboard and MCP configuration require OAuth session - // Other routes work with API key authentication (handled by backend) - const oauthOnlyRoutes = ['/dashboard', '/mcp'] - const requiresOAuth = oauthOnlyRoutes.some((route) => - request.nextUrl.pathname.startsWith(route) - ) - - if (requiresOAuth && !user) { - return NextResponse.redirect(new URL('/login', request.url)) - } - - return supabaseResponse -} - -export const config = { - matcher: [ - '/((?!_next/static|_next/image|favicon.ico|auth/dev-session|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)', - ], -} diff --git a/web/next.config.js b/web/next.config.js deleted file mode 100644 index 1fa3d089..00000000 --- a/web/next.config.js +++ /dev/null @@ -1,7 +0,0 @@ -/** @type {import('next').NextConfig} */ -const nextConfig = { - // No special configuration needed initially - reactStrictMode: true, -} - -module.exports = nextConfig diff --git a/web/package.json b/web/package.json deleted file mode 100644 index 6f10f792..00000000 --- a/web/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "kotadb-web", - "version": "0.1.0", - "private": true, - "scripts": { - "dev": "next dev -p 3001", - "build": "next build", - "start": "next start -p 3001", - "lint": "next lint", - "test:e2e": "playwright test", - "test:e2e:ui": "playwright test --ui" - }, - "dependencies": { - "@kotadb/shared": "workspace:*", - "@stripe/stripe-js": "^8.1.0", - "@supabase/ssr": "^0.7.0", - "@supabase/supabase-js": "^2.76.1", - "@vercel/analytics": "^1.5.0", - "@vercel/speed-insights": "^1.2.0", - "next": "^14.2.0", - "react": "^18.3.0", - "react-dom": "^18.3.0" - }, - "devDependencies": { - "@playwright/test": "^1.47.0", - "@types/node": "^20", - "@types/react": "^18", - "@types/react-dom": "^18", - "autoprefixer": "^10.0.1", - "eslint": "^8", - "eslint-config-next": "14.2.0", - "postcss": "^8", - "tailwindcss": "^3.4.1", - "typescript": "^5" - } -} diff --git a/web/postcss.config.js b/web/postcss.config.js deleted file mode 100644 index 33ad091d..00000000 --- a/web/postcss.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - plugins: { - tailwindcss: {}, - autoprefixer: {}, - }, -} diff --git a/web/tailwind.config.ts b/web/tailwind.config.ts deleted file mode 100644 index 967bdb32..00000000 --- a/web/tailwind.config.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type { Config } from 'tailwindcss' - -const config: 
Config = { - content: [ - './pages/**/*.{js,ts,jsx,tsx,mdx}', - './components/**/*.{js,ts,jsx,tsx,mdx}', - './app/**/*.{js,ts,jsx,tsx,mdx}', - ], - theme: { - extend: { - colors: { - background: 'var(--background)', - foreground: 'var(--foreground)', - }, - backdropBlur: { - xs: '2px', - sm: '4px', - md: '10px', - lg: '16px', - xl: '24px', - }, - backdropSaturate: { - 180: '180%', - }, - }, - }, - plugins: [], -} -export default config diff --git a/web/tests/auth/dev-session.test.ts b/web/tests/auth/dev-session.test.ts deleted file mode 100644 index be9b1b10..00000000 --- a/web/tests/auth/dev-session.test.ts +++ /dev/null @@ -1,209 +0,0 @@ -import { test, expect } from '@playwright/test' -import { createAuthenticatedSession, injectSessionCookies } from '../../lib/playwright-helpers' - -/** - * Integration tests for /auth/dev-session endpoint - * - * These tests validate: - * 1. Production environment guard blocks requests - * 2. Session creation succeeds in dev mode - * 3. Health check endpoint returns correct status - * 4. Cookie injection enables navigation to protected routes - * 5. API key works with backend endpoints - * - * Prerequisites: - * - Next.js dev server running on http://localhost:3001 - * - Backend API server running on http://localhost:3000 - * - Supabase Local running on http://localhost:54326 - */ - -const DEV_ENDPOINT = 'http://localhost:3001/auth/dev-session' -const API_BASE = 'http://localhost:3000' -const WEB_BASE = 'http://localhost:3001' - -test.describe('Dev Session Endpoint', () => { - test('should block requests in production environment', async ({ page }) => { - // Mock production environment by setting headers - // Note: This test may need to be adapted based on how environment - // variables are checked (they're typically set at build time) - test.skip(true, 'Environment variables cannot be mocked in runtime tests') - }) - - test('should return availability status via GET', async ({ request }) => { - const response = await request.get(DEV_ENDPOINT) - expect(response.ok()).toBeTruthy() - - const data = await response.json() - expect(data).toHaveProperty('available') - expect(data).toHaveProperty('environment') - expect(data.available).toBe(true) // Should be available in dev mode - }) - - test('should create test user and return session tokens', async ({ request }) => { - const email = `test-${Date.now()}@playwright.test` - - const response = await request.post(DEV_ENDPOINT, { - data: { - email, - tier: 'free' - } - }) - - expect(response.ok()).toBeTruthy() - - const data = await response.json() - - // Validate response structure - expect(data).toHaveProperty('userId') - expect(data).toHaveProperty('email') - expect(data).toHaveProperty('session') - expect(data).toHaveProperty('message') - - // Validate session structure - expect(data.session).toHaveProperty('access_token') - expect(data.session).toHaveProperty('refresh_token') - expect(data.session).toHaveProperty('expires_in') - expect(data.session).toHaveProperty('expires_at') - - // Validate session tokens are non-empty - expect(data.session.access_token).toBeTruthy() - expect(data.session.refresh_token).toBeTruthy() - expect(data.session.expires_in).toBeGreaterThan(0) - expect(data.session.expires_at).toBeGreaterThan(Date.now() / 1000) - - // Validate email matches request - expect(data.email).toBe(email) - }) - - test('should handle duplicate user creation gracefully', async ({ request }) => { - const email = `duplicate-test-${Date.now()}@playwright.test` - - // Create user first time - const response1 
= await request.post(DEV_ENDPOINT, { - data: { email, tier: 'free' } - }) - expect(response1.ok()).toBeTruthy() - - // Create same user second time (should succeed with existing user) - const response2 = await request.post(DEV_ENDPOINT, { - data: { email, tier: 'free' } - }) - expect(response2.ok()).toBeTruthy() - - const data = await response2.json() - expect(data.email).toBe(email) - expect(data.session.access_token).toBeTruthy() - }) - - test('should reject invalid email format', async ({ request }) => { - const response = await request.post(DEV_ENDPOINT, { - data: { - email: 'invalid-email', - tier: 'free' - } - }) - - expect(response.status()).toBe(400) - const data = await response.json() - expect(data).toHaveProperty('error') - expect(data.error).toContain('Invalid request body') - }) - - test('should default to free tier when tier is omitted', async ({ request }) => { - const email = `default-tier-${Date.now()}@playwright.test` - - const response = await request.post(DEV_ENDPOINT, { - data: { email } - }) - - expect(response.ok()).toBeTruthy() - const data = await response.json() - expect(data.email).toBe(email) - }) - - test('should generate API key and include in response', async ({ request }) => { - const email = `api-key-test-${Date.now()}@playwright.test` - - const response = await request.post(DEV_ENDPOINT, { - data: { email, tier: 'free' } - }) - - expect(response.ok()).toBeTruthy() - - const data = await response.json() - - // API key should be present (unless backend is unavailable) - // If backend API is running, apiKey should be defined - if (data.apiKey) { - expect(data.apiKey).toMatch(/^kota_/) - } - }) - - test('should inject cookies and enable authenticated navigation', async ({ page }) => { - const email = `auth-nav-test-${Date.now()}@playwright.test` - - // Create session via helper - const { session } = await createAuthenticatedSession(page, email, 'free', DEV_ENDPOINT) - - expect(session.access_token).toBeTruthy() - - // Navigate to a page (this would be a protected route in real app) - // For now, just verify we can navigate with cookies set - await page.goto(WEB_BASE) - - // Verify cookies were set - const cookies = await page.context().cookies() - const authCookie = cookies.find(c => c.name.includes('auth-token')) - - expect(authCookie).toBeDefined() - expect(authCookie?.value).toContain(session.access_token) - }) - - test('should work with injectSessionCookies helper', async ({ page, request }) => { - const email = `inject-helper-${Date.now()}@playwright.test` - - // Create session directly via API - const response = await request.post(DEV_ENDPOINT, { - data: { email, tier: 'free' } - }) - - const data = await response.json() - - // Inject cookies using helper - await injectSessionCookies(page, data.session) - - // Verify cookies were set - const cookies = await page.context().cookies() - const authCookie = cookies.find(c => c.name.includes('auth-token')) - - expect(authCookie).toBeDefined() - expect(authCookie?.value).toContain(data.session.access_token) - }) - - test('should generate valid API key for backend requests', async ({ request }) => { - const email = `backend-api-${Date.now()}@playwright.test` - - // Create session and get API key - const sessionResponse = await request.post(DEV_ENDPOINT, { - data: { email, tier: 'free' } - }) - - const data = await sessionResponse.json() - - // Skip if API key generation failed - test.skip(!data.apiKey, 'API key not generated (backend may be unavailable)') - - // Test API key with backend endpoint - const 
apiResponse = await request.get(`${API_BASE}/api/subscriptions/current`, { - headers: { - 'Authorization': `Bearer ${data.apiKey}` - } - }) - - // Should not get 401 Unauthorized (API key is valid) - expect(apiResponse.status()).not.toBe(401) - - // May get 404 if no subscription exists, but that's OK - // The important thing is that the API key authenticated successfully - }) -}) diff --git a/web/tsconfig.json b/web/tsconfig.json deleted file mode 100644 index 43abcdaa..00000000 --- a/web/tsconfig.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "compilerOptions": { - "target": "ES2017", - "lib": ["dom", "dom.iterable", "esnext"], - "allowJs": true, - "skipLibCheck": true, - "strict": true, - "noEmit": true, - "esModuleInterop": true, - "module": "esnext", - "moduleResolution": "bundler", - "resolveJsonModule": true, - "isolatedModules": true, - "jsx": "preserve", - "incremental": true, - "plugins": [ - { - "name": "next" - } - ], - "paths": { - "@/*": ["./*"], - "@shared/*": ["../shared/*"] - } - }, - "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], - "exclude": ["node_modules"] -}
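For reference, the bug-450 plan at the top of this diff describes the fix ("add authentication to billing portal request") only in prose. A minimal sketch of that pattern follows. It assumes the Supabase browser client from the deleted `web/lib/supabase.ts`; the handler name and endpoint path are illustrative, not taken from the diff.

```typescript
import { createClient } from '@/lib/supabase'

// Hypothetical handler: only the session-token / Authorization-header pattern
// comes from the plan above. '/api/billing/portal' is an assumed path.
async function handleManageBilling(): Promise<void> {
  const supabase = createClient()
  const { data: { session } } = await supabase.auth.getSession()

  if (!session) {
    // Plan requirement: surface a user-facing error instead of failing silently.
    throw new Error('Please sign in again to manage billing.')
  }

  const response = await fetch('/api/billing/portal', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      // The missing piece identified in issue #450:
      Authorization: `Bearer ${session.access_token}`,
    },
  })

  if (!response.ok) {
    throw new Error(`Billing portal request failed: ${response.status}`)
  }

  // Backend responds with a Stripe billing portal URL; navigate to it.
  const { url } = await response.json()
  window.location.assign(url)
}
```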