diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 000000000..c7ac7de47 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,165 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +Liwords (Woogles.io) is a web-based crossword board game platform with real-time multiplayer capabilities. The project consists of: + +- **Backend API Server**: Go-based API server using Connect RPC (gRPC-compatible) +- **Frontend**: React/TypeScript UI built with RSBuild +- **Socket Server**: Separate Go service for real-time communication (in liwords-socket repo) +- **Game Engine**: Macondo library provides core game logic (in macondo repo) +- **Infrastructure**: PostgreSQL, Redis, NATS messaging, S3 storage + +## Key Commands + +### Frontend Development (liwords-ui/) +```bash +# Install dependencies +npm install + +# Run development server +npm start + +# Build production +npm run build + +# Run tests +npm test + +# Lint code +npm run lint + +# Format code +npm run format + +# Full pre-commit check +npm run isready +``` + +### Backend Development +```bash +# Run API server locally +go run cmd/liwords-api/*.go + +# Run tests +go test ./... 
+ +# Generate code from proto/sql +go generate + +# Run migrations up +migrate -database "postgres://postgres:pass@localhost:5432/liwords?sslmode=disable" -path db/migrations up + +# Run migrations down +./migrate_down.sh +``` + +### Docker Development +```bash +# Full stack with Docker +docker compose up + +# Services only (for hybrid development) +docker compose -f dc-local-services.yml up + +# Register a bot user +./scripts/utilities/register-bot.sh BotUsername +``` + +## Architecture + +### Service Communication +- **API Server** → **Socket Server**: Via NATS pub/sub for real-time events +- **Frontend** → **API Server**: Connect RPC over HTTP +- **Frontend** → **Socket Server**: WebSocket for real-time updates +- **API Server** → **PostgreSQL**: Primary data store +- **API Server** → **Redis**: Session storage, presence, chat history + +### Key Patterns + +1. **Code Generation**: + - Proto files → Go/TypeScript code via `buf generate` + - SQL queries → Go code via `sqlc generate` + - Run `go generate` after modifying .proto or .sql files + +2. **Service Structure**: + - Each domain has a service in `pkg/` (e.g., `pkg/gameplay`, `pkg/tournament`) + - Services expose Connect RPC handlers + - Database access through generated sqlc code in `pkg/stores/` + +3. **Real-time Events**: + - Game events flow through NATS + - Socket server broadcasts to connected clients + - Event types defined in `api/proto/ipc/` + +4. **Authentication**: + - JWT tokens for API authentication + - Session cookies for web clients + - Bot accounts have `internal_bot` flag + +### Important Directories + +- `api/proto/`: Protocol buffer definitions +- `cmd/`: Entry points for various services +- `pkg/`: Core business logic and services +- `db/migrations/`: PostgreSQL schema migrations +- `db/queries/`: SQL queries for sqlc +- `liwords-ui/src/`: Frontend React code +- `rpc/`: Generated RPC code + +## Testing + +### Running Tests +```bash +# Backend unit tests +go test ./pkg/... 
+ +# Frontend tests +cd liwords-ui && npm test + +# Integration tests (requires running services) +go test ./pkg/integration_testing/... +``` + +### Test Patterns +- Go tests use standard `testing` package +- Frontend uses Vitest +- Test data in `testdata/` directories +- Golden files for snapshot testing + +## Common Development Tasks + +### Adding a New RPC Endpoint +1. Define the service method in `api/proto/[service]/[service].proto` +2. Run `go generate` to generate code +3. Implement the handler in `pkg/[service]/service.go` +4. Add the service to the router in `cmd/liwords-api/main.go` + +### Adding a Database Query +1. Write the SQL query in `db/queries/[domain].sql` +2. Run `go generate` to generate the Go code +3. Use the generated methods in your service + +### Modifying the Database Schema +1. Create a new migration: `./gen_migration.sh [migration_name]` +2. Write the up/down SQL in `db/migrations/` +3. Run migrations: `migrate -database "..." -path db/migrations up` + +## Environment Variables + +Key environment variables (see docker-compose.yml for full list): +- `DB_*`: PostgreSQL connection settings +- `REDIS_URL`: Redis connection +- `NATS_URL`: NATS server URL +- `SECRET_KEY`: JWT signing key +- `MACONDO_DATA_PATH`: Path to game data files +- `AWS_*`: S3 configuration for uploads + +## Debugging Tips + +- Enable debug logging: `DEBUG=1` +- Access pprof: http://localhost:8001/debug/pprof/ +- NATS monitoring: Connect to NATS and subscribe to `>` for all messages +- Database queries are logged when `DEBUG=1` \ No newline at end of file diff --git a/aws/cfn/daily-maintenance.yaml b/aws/cfn/daily-maintenance.yaml index 485431d95..bd8f60532 100644 --- a/aws/cfn/daily-maintenance.yaml +++ b/aws/cfn/daily-maintenance.yaml @@ -25,7 +25,7 @@ Resources: "name": "maintenance", "command": [ "/opt/maintenance", - "integrations-refresher" + "integrations-refresher,partition-creator,cancelled-games-cleanup" ] } ] diff --git a/cmd/maintenance/main.go 
b/cmd/maintenance/main.go index 623ed6459..767cce59f 100644 --- a/cmd/maintenance/main.go +++ b/cmd/maintenance/main.go @@ -5,6 +5,7 @@ import ( "context" "encoding/json" "errors" + "fmt" "io" "net/http" "net/url" @@ -56,6 +57,12 @@ func main() { case "sub-badge-updater": err := SubBadgeUpdater() log.Err(err).Msg("ran subBadgeUpdater") + case "partition-creator": + err := PartitionCreator() + log.Err(err).Msg("ran partitionCreator") + case "cancelled-games-cleanup": + err := CancelledGamesCleanup() + log.Err(err).Msg("ran cancelledGamesCleanup") default: log.Error().Str("command", command).Msg("command not recognized") } @@ -350,3 +357,136 @@ func updateBadges(q *models.Queries, pool *pgxpool.Pool) error { return tx.Commit(ctx) } + +// PartitionCreator creates monthly partitions for the past_games table +// It only runs during the last 5 days of the month to minimize overhead +func PartitionCreator() error { + // Only run on days 26-31 of the month + now := time.Now() + if now.Day() < 26 { + log.Info().Int("day", now.Day()).Msg("skipping partition creation - not end of month") + return nil + } + + log.Info().Msg("checking for partition creation") + cfg := &config.Config{} + cfg.Load(os.Args[1:]) + + if cfg.Debug { + zerolog.SetGlobalLevel(zerolog.DebugLevel) + } else { + zerolog.SetGlobalLevel(zerolog.InfoLevel) + } + + dbCfg, err := pgxpool.ParseConfig(cfg.DBConnUri) + if err != nil { + return err + } + ctx := context.Background() + dbPool, err := pgxpool.NewWithConfig(ctx, dbCfg) + if err != nil { + return err + } + defer dbPool.Close() + + // Check for existing partitions + rows, err := dbPool.Query(ctx, ` + SELECT tablename + FROM pg_tables + WHERE schemaname = 'public' + AND tablename LIKE 'past_games_%' + ORDER BY tablename + `) + if err != nil { + return err + } + defer rows.Close() + + existingPartitions := make(map[string]bool) + for rows.Next() { + var tableName string + if err := rows.Scan(&tableName); err != nil { + return err + } + 
existingPartitions[tableName] = true + } + + // Create partitions for next 3 months if they don't exist + partitionsCreated := 0 + for i := 0; i < 3; i++ { + targetDate := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, time.UTC).AddDate(0, i+1, 0) // anchor at the 1st: AddDate from day 29-31 overflows (e.g. Aug 31 + 1 month = Oct 1) and would skip a month + year := targetDate.Year() + month := targetDate.Month() + + partitionName := fmt.Sprintf("past_games_%04d_%02d", year, month) + + if existingPartitions[partitionName] { + log.Debug().Str("partition", partitionName).Msg("partition already exists") + continue + } + + // Calculate the start and end dates for the partition + startDate := time.Date(year, month, 1, 0, 0, 0, 0, time.UTC) + endDate := startDate.AddDate(0, 1, 0) + + createSQL := fmt.Sprintf(` + CREATE TABLE %s PARTITION OF past_games + FOR VALUES FROM ('%s') TO ('%s') + `, partitionName, startDate.Format("2006-01-02"), endDate.Format("2006-01-02")) + + _, err := dbPool.Exec(ctx, createSQL) + if err != nil { + log.Err(err).Str("partition", partitionName).Msg("failed to create partition") + return err + } + + log.Info().Str("partition", partitionName). + Str("from", startDate.Format("2006-01-02")). + Str("to", endDate.Format("2006-01-02")). 
+ Msg("created partition") + partitionsCreated++ + } + + log.Info().Int("partitions_created", partitionsCreated).Msg("partition creation complete") + return nil +} + +// CancelledGamesCleanup deletes cancelled games older than 7 days +func CancelledGamesCleanup() error { + log.Info().Msg("starting cancelled games cleanup") + cfg := &config.Config{} + cfg.Load(os.Args[1:]) + + if cfg.Debug { + zerolog.SetGlobalLevel(zerolog.DebugLevel) + } else { + zerolog.SetGlobalLevel(zerolog.InfoLevel) + } + + dbCfg, err := pgxpool.ParseConfig(cfg.DBConnUri) + if err != nil { + return err + } + ctx := context.Background() + dbPool, err := pgxpool.NewWithConfig(ctx, dbCfg) + if err != nil { + return err + } + defer dbPool.Close() + + // Delete cancelled games older than 7 days + // game_end_reason = 7 is CANCELLED + result, err := dbPool.Exec(ctx, ` + DELETE FROM games + WHERE game_end_reason = 7 + AND created_at < NOW() - INTERVAL '7 days' + `) + if err != nil { + return err + } + + rowsDeleted := result.RowsAffected() + log.Info().Int64("games_deleted", rowsDeleted).Msg("cancelled games cleanup complete") + + return nil +} diff --git a/db/migrations/202508250421_partitioned_games.down.sql b/db/migrations/202508250421_partitioned_games.down.sql new file mode 100644 index 000000000..ffc28bd3b --- /dev/null +++ b/db/migrations/202508250421_partitioned_games.down.sql @@ -0,0 +1,5 @@ +BEGIN; + +DROP TABLE past_games; + +COMMIT; \ No newline at end of file diff --git a/db/migrations/202508250421_partitioned_games.up.sql b/db/migrations/202508250421_partitioned_games.up.sql new file mode 100644 index 000000000..b2dfa1860 --- /dev/null +++ b/db/migrations/202508250421_partitioned_games.up.sql @@ -0,0 +1,26 @@ +BEGIN; + +CREATE TABLE past_games ( +-- id SERIAL PRIMARY KEY, -- experiment with this. we might not need it? + gid text NOT NULL, + created_at timestamp with time zone NOT NULL, + game_end_reason SMALLINT NOT NULL, + winner_idx SMALLINT, -- 0, 1 for first or second, -1 for draw. 
NULL if there's no winner. + game_request jsonb NOT NULL DEFAULT '{}', + game_document jsonb NOT NULL DEFAULT '{}', + stats jsonb NOT NULL DEFAULT '{}', + quickdata jsonb NOT NULL DEFAULT '{}', + type SMALLINT NOT NULL, + tournament_data jsonb -- can be null +) PARTITION BY RANGE (created_at); + +CREATE INDEX idx_past_games_tournament_id + ON public.past_games USING hash(((tournament_data ->>'Id'::text))); +CREATE INDEX idx_past_games_gid ON public.past_games USING btree (gid); +CREATE INDEX idx_past_games_rematch_req_idx + ON public.past_games USING hash (((quickdata ->> 'o'::text))); +CREATE INDEX idx_past_games_created_at + ON public.past_games USING btree (created_at); + + +COMMIT; \ No newline at end of file diff --git a/db/migrations/202508250511_improve_game_players.down.sql b/db/migrations/202508250511_improve_game_players.down.sql new file mode 100644 index 000000000..6d6f80e94 --- /dev/null +++ b/db/migrations/202508250511_improve_game_players.down.sql @@ -0,0 +1,18 @@ +BEGIN; + +-- Revert to original game_players structure +DROP TABLE IF EXISTS game_players; + +CREATE TABLE game_players ( + game_id integer NOT NULL, + player_id integer NOT NULL, + player_index SMALLINT, + FOREIGN KEY (game_id) REFERENCES games (id), + FOREIGN KEY (player_id) REFERENCES users (id), + PRIMARY KEY (game_id, player_id) +); + +-- Remove migration status column +ALTER TABLE games DROP COLUMN IF EXISTS migration_status; + +COMMIT; \ No newline at end of file diff --git a/db/migrations/202508250511_improve_game_players.up.sql b/db/migrations/202508250511_improve_game_players.up.sql new file mode 100644 index 000000000..61f7e7477 --- /dev/null +++ b/db/migrations/202508250511_improve_game_players.up.sql @@ -0,0 +1,44 @@ +BEGIN; + +-- Drop the existing empty game_players table +DROP TABLE IF EXISTS game_players; + +-- Create improved game_players table +CREATE TABLE game_players ( + game_uuid text NOT NULL, + player_id integer NOT NULL, + player_index SMALLINT NOT NULL CHECK 
(player_index IN (0, 1)), + + -- Game outcome data + score integer NOT NULL, + won boolean, -- true = won, false = lost, null = tie + game_end_reason SMALLINT NOT NULL, + + -- Rating data (nullable for unrated games) + rating_before integer, + rating_after integer, + rating_delta integer, -- convenience field: rating_after - rating_before + + -- Temporal and type data + created_at timestamp with time zone NOT NULL, + game_type SMALLINT NOT NULL, + + -- Opponent info (denormalized for convenience) + opponent_id integer NOT NULL, + opponent_score integer NOT NULL, + + FOREIGN KEY (player_id) REFERENCES users (id), + FOREIGN KEY (opponent_id) REFERENCES users (id), + PRIMARY KEY (game_uuid, player_id) +); + +-- Essential indexes for common queries +CREATE INDEX idx_game_players_player_created ON game_players(player_id, created_at DESC); +CREATE INDEX idx_game_players_opponents ON game_players(player_id, opponent_id, created_at DESC); +CREATE INDEX idx_game_players_rating_change ON game_players(player_id, rating_delta) WHERE rating_delta IS NOT NULL; + +-- Add migration status to games table for tracking +ALTER TABLE games ADD COLUMN IF NOT EXISTS migration_status SMALLINT DEFAULT 0; +-- 0 = not migrated, 1 = migrated to past_games, 2 = archived to S3 + +COMMIT; \ No newline at end of file diff --git a/db/migrations/202508291311_create_past_games_partitions.down.sql b/db/migrations/202508291311_create_past_games_partitions.down.sql new file mode 100644 index 000000000..64e10da19 --- /dev/null +++ b/db/migrations/202508291311_create_past_games_partitions.down.sql @@ -0,0 +1,89 @@ +BEGIN; + +-- Drop all partition tables for past_games +-- 2020 partitions +DROP TABLE IF EXISTS past_games_2020_08; +DROP TABLE IF EXISTS past_games_2020_09; +DROP TABLE IF EXISTS past_games_2020_10; +DROP TABLE IF EXISTS past_games_2020_11; +DROP TABLE IF EXISTS past_games_2020_12; + +-- 2021 partitions +DROP TABLE IF EXISTS past_games_2021_01; +DROP TABLE IF EXISTS past_games_2021_02; +DROP 
TABLE IF EXISTS past_games_2021_03; +DROP TABLE IF EXISTS past_games_2021_04; +DROP TABLE IF EXISTS past_games_2021_05; +DROP TABLE IF EXISTS past_games_2021_06; +DROP TABLE IF EXISTS past_games_2021_07; +DROP TABLE IF EXISTS past_games_2021_08; +DROP TABLE IF EXISTS past_games_2021_09; +DROP TABLE IF EXISTS past_games_2021_10; +DROP TABLE IF EXISTS past_games_2021_11; +DROP TABLE IF EXISTS past_games_2021_12; + +-- 2022 partitions +DROP TABLE IF EXISTS past_games_2022_01; +DROP TABLE IF EXISTS past_games_2022_02; +DROP TABLE IF EXISTS past_games_2022_03; +DROP TABLE IF EXISTS past_games_2022_04; +DROP TABLE IF EXISTS past_games_2022_05; +DROP TABLE IF EXISTS past_games_2022_06; +DROP TABLE IF EXISTS past_games_2022_07; +DROP TABLE IF EXISTS past_games_2022_08; +DROP TABLE IF EXISTS past_games_2022_09; +DROP TABLE IF EXISTS past_games_2022_10; +DROP TABLE IF EXISTS past_games_2022_11; +DROP TABLE IF EXISTS past_games_2022_12; + +-- 2023 partitions +DROP TABLE IF EXISTS past_games_2023_01; +DROP TABLE IF EXISTS past_games_2023_02; +DROP TABLE IF EXISTS past_games_2023_03; +DROP TABLE IF EXISTS past_games_2023_04; +DROP TABLE IF EXISTS past_games_2023_05; +DROP TABLE IF EXISTS past_games_2023_06; +DROP TABLE IF EXISTS past_games_2023_07; +DROP TABLE IF EXISTS past_games_2023_08; +DROP TABLE IF EXISTS past_games_2023_09; +DROP TABLE IF EXISTS past_games_2023_10; +DROP TABLE IF EXISTS past_games_2023_11; +DROP TABLE IF EXISTS past_games_2023_12; + +-- 2024 partitions +DROP TABLE IF EXISTS past_games_2024_01; +DROP TABLE IF EXISTS past_games_2024_02; +DROP TABLE IF EXISTS past_games_2024_03; +DROP TABLE IF EXISTS past_games_2024_04; +DROP TABLE IF EXISTS past_games_2024_05; +DROP TABLE IF EXISTS past_games_2024_06; +DROP TABLE IF EXISTS past_games_2024_07; +DROP TABLE IF EXISTS past_games_2024_08; +DROP TABLE IF EXISTS past_games_2024_09; +DROP TABLE IF EXISTS past_games_2024_10; +DROP TABLE IF EXISTS past_games_2024_11; +DROP TABLE IF EXISTS past_games_2024_12; + +-- 
2025 partitions +DROP TABLE IF EXISTS past_games_2025_01; +DROP TABLE IF EXISTS past_games_2025_02; +DROP TABLE IF EXISTS past_games_2025_03; +DROP TABLE IF EXISTS past_games_2025_04; +DROP TABLE IF EXISTS past_games_2025_05; +DROP TABLE IF EXISTS past_games_2025_06; +DROP TABLE IF EXISTS past_games_2025_07; +DROP TABLE IF EXISTS past_games_2025_08; +DROP TABLE IF EXISTS past_games_2025_09; +DROP TABLE IF EXISTS past_games_2025_10; +DROP TABLE IF EXISTS past_games_2025_11; +DROP TABLE IF EXISTS past_games_2025_12; + +-- 2026 partitions +DROP TABLE IF EXISTS past_games_2026_01; +DROP TABLE IF EXISTS past_games_2026_02; +DROP TABLE IF EXISTS past_games_2026_03; +DROP TABLE IF EXISTS past_games_2026_04; +DROP TABLE IF EXISTS past_games_2026_05; +DROP TABLE IF EXISTS past_games_2026_06; + +COMMIT; \ No newline at end of file diff --git a/db/migrations/202508291311_create_past_games_partitions.up.sql b/db/migrations/202508291311_create_past_games_partitions.up.sql new file mode 100644 index 000000000..93867a903 --- /dev/null +++ b/db/migrations/202508291311_create_past_games_partitions.up.sql @@ -0,0 +1,163 @@ +BEGIN; + +-- Create monthly partitions for past_games table +-- Starting from August 2020 (earliest game: 2020-08-15) through January 2026 +-- The TO value is exclusive, so '2020-09-01' means up to but not including Sept 1st + +-- 2020 partitions (Aug-Dec) +CREATE TABLE past_games_2020_08 PARTITION OF past_games + FOR VALUES FROM ('2020-08-01') TO ('2020-09-01'); +CREATE TABLE past_games_2020_09 PARTITION OF past_games + FOR VALUES FROM ('2020-09-01') TO ('2020-10-01'); +CREATE TABLE past_games_2020_10 PARTITION OF past_games + FOR VALUES FROM ('2020-10-01') TO ('2020-11-01'); +CREATE TABLE past_games_2020_11 PARTITION OF past_games + FOR VALUES FROM ('2020-11-01') TO ('2020-12-01'); +CREATE TABLE past_games_2020_12 PARTITION OF past_games + FOR VALUES FROM ('2020-12-01') TO ('2021-01-01'); + +-- 2021 partitions (all 12 months) +CREATE TABLE past_games_2021_01 
PARTITION OF past_games + FOR VALUES FROM ('2021-01-01') TO ('2021-02-01'); +CREATE TABLE past_games_2021_02 PARTITION OF past_games + FOR VALUES FROM ('2021-02-01') TO ('2021-03-01'); +CREATE TABLE past_games_2021_03 PARTITION OF past_games + FOR VALUES FROM ('2021-03-01') TO ('2021-04-01'); +CREATE TABLE past_games_2021_04 PARTITION OF past_games + FOR VALUES FROM ('2021-04-01') TO ('2021-05-01'); +CREATE TABLE past_games_2021_05 PARTITION OF past_games + FOR VALUES FROM ('2021-05-01') TO ('2021-06-01'); +CREATE TABLE past_games_2021_06 PARTITION OF past_games + FOR VALUES FROM ('2021-06-01') TO ('2021-07-01'); +CREATE TABLE past_games_2021_07 PARTITION OF past_games + FOR VALUES FROM ('2021-07-01') TO ('2021-08-01'); +CREATE TABLE past_games_2021_08 PARTITION OF past_games + FOR VALUES FROM ('2021-08-01') TO ('2021-09-01'); +CREATE TABLE past_games_2021_09 PARTITION OF past_games + FOR VALUES FROM ('2021-09-01') TO ('2021-10-01'); +CREATE TABLE past_games_2021_10 PARTITION OF past_games + FOR VALUES FROM ('2021-10-01') TO ('2021-11-01'); +CREATE TABLE past_games_2021_11 PARTITION OF past_games + FOR VALUES FROM ('2021-11-01') TO ('2021-12-01'); +CREATE TABLE past_games_2021_12 PARTITION OF past_games + FOR VALUES FROM ('2021-12-01') TO ('2022-01-01'); + +-- 2022 partitions (all 12 months) +CREATE TABLE past_games_2022_01 PARTITION OF past_games + FOR VALUES FROM ('2022-01-01') TO ('2022-02-01'); +CREATE TABLE past_games_2022_02 PARTITION OF past_games + FOR VALUES FROM ('2022-02-01') TO ('2022-03-01'); +CREATE TABLE past_games_2022_03 PARTITION OF past_games + FOR VALUES FROM ('2022-03-01') TO ('2022-04-01'); +CREATE TABLE past_games_2022_04 PARTITION OF past_games + FOR VALUES FROM ('2022-04-01') TO ('2022-05-01'); +CREATE TABLE past_games_2022_05 PARTITION OF past_games + FOR VALUES FROM ('2022-05-01') TO ('2022-06-01'); +CREATE TABLE past_games_2022_06 PARTITION OF past_games + FOR VALUES FROM ('2022-06-01') TO ('2022-07-01'); +CREATE TABLE past_games_2022_07 
PARTITION OF past_games + FOR VALUES FROM ('2022-07-01') TO ('2022-08-01'); +CREATE TABLE past_games_2022_08 PARTITION OF past_games + FOR VALUES FROM ('2022-08-01') TO ('2022-09-01'); +CREATE TABLE past_games_2022_09 PARTITION OF past_games + FOR VALUES FROM ('2022-09-01') TO ('2022-10-01'); +CREATE TABLE past_games_2022_10 PARTITION OF past_games + FOR VALUES FROM ('2022-10-01') TO ('2022-11-01'); +CREATE TABLE past_games_2022_11 PARTITION OF past_games + FOR VALUES FROM ('2022-11-01') TO ('2022-12-01'); +CREATE TABLE past_games_2022_12 PARTITION OF past_games + FOR VALUES FROM ('2022-12-01') TO ('2023-01-01'); + +-- 2023 partitions (all 12 months) +CREATE TABLE past_games_2023_01 PARTITION OF past_games + FOR VALUES FROM ('2023-01-01') TO ('2023-02-01'); +CREATE TABLE past_games_2023_02 PARTITION OF past_games + FOR VALUES FROM ('2023-02-01') TO ('2023-03-01'); +CREATE TABLE past_games_2023_03 PARTITION OF past_games + FOR VALUES FROM ('2023-03-01') TO ('2023-04-01'); +CREATE TABLE past_games_2023_04 PARTITION OF past_games + FOR VALUES FROM ('2023-04-01') TO ('2023-05-01'); +CREATE TABLE past_games_2023_05 PARTITION OF past_games + FOR VALUES FROM ('2023-05-01') TO ('2023-06-01'); +CREATE TABLE past_games_2023_06 PARTITION OF past_games + FOR VALUES FROM ('2023-06-01') TO ('2023-07-01'); +CREATE TABLE past_games_2023_07 PARTITION OF past_games + FOR VALUES FROM ('2023-07-01') TO ('2023-08-01'); +CREATE TABLE past_games_2023_08 PARTITION OF past_games + FOR VALUES FROM ('2023-08-01') TO ('2023-09-01'); +CREATE TABLE past_games_2023_09 PARTITION OF past_games + FOR VALUES FROM ('2023-09-01') TO ('2023-10-01'); +CREATE TABLE past_games_2023_10 PARTITION OF past_games + FOR VALUES FROM ('2023-10-01') TO ('2023-11-01'); +CREATE TABLE past_games_2023_11 PARTITION OF past_games + FOR VALUES FROM ('2023-11-01') TO ('2023-12-01'); +CREATE TABLE past_games_2023_12 PARTITION OF past_games + FOR VALUES FROM ('2023-12-01') TO ('2024-01-01'); + +-- 2024 partitions (all 12 
months) +CREATE TABLE past_games_2024_01 PARTITION OF past_games + FOR VALUES FROM ('2024-01-01') TO ('2024-02-01'); +CREATE TABLE past_games_2024_02 PARTITION OF past_games + FOR VALUES FROM ('2024-02-01') TO ('2024-03-01'); +CREATE TABLE past_games_2024_03 PARTITION OF past_games + FOR VALUES FROM ('2024-03-01') TO ('2024-04-01'); +CREATE TABLE past_games_2024_04 PARTITION OF past_games + FOR VALUES FROM ('2024-04-01') TO ('2024-05-01'); +CREATE TABLE past_games_2024_05 PARTITION OF past_games + FOR VALUES FROM ('2024-05-01') TO ('2024-06-01'); +CREATE TABLE past_games_2024_06 PARTITION OF past_games + FOR VALUES FROM ('2024-06-01') TO ('2024-07-01'); +CREATE TABLE past_games_2024_07 PARTITION OF past_games + FOR VALUES FROM ('2024-07-01') TO ('2024-08-01'); +CREATE TABLE past_games_2024_08 PARTITION OF past_games + FOR VALUES FROM ('2024-08-01') TO ('2024-09-01'); +CREATE TABLE past_games_2024_09 PARTITION OF past_games + FOR VALUES FROM ('2024-09-01') TO ('2024-10-01'); +CREATE TABLE past_games_2024_10 PARTITION OF past_games + FOR VALUES FROM ('2024-10-01') TO ('2024-11-01'); +CREATE TABLE past_games_2024_11 PARTITION OF past_games + FOR VALUES FROM ('2024-11-01') TO ('2024-12-01'); +CREATE TABLE past_games_2024_12 PARTITION OF past_games + FOR VALUES FROM ('2024-12-01') TO ('2025-01-01'); + +-- 2025 partitions (all 12 months) +CREATE TABLE past_games_2025_01 PARTITION OF past_games + FOR VALUES FROM ('2025-01-01') TO ('2025-02-01'); +CREATE TABLE past_games_2025_02 PARTITION OF past_games + FOR VALUES FROM ('2025-02-01') TO ('2025-03-01'); +CREATE TABLE past_games_2025_03 PARTITION OF past_games + FOR VALUES FROM ('2025-03-01') TO ('2025-04-01'); +CREATE TABLE past_games_2025_04 PARTITION OF past_games + FOR VALUES FROM ('2025-04-01') TO ('2025-05-01'); +CREATE TABLE past_games_2025_05 PARTITION OF past_games + FOR VALUES FROM ('2025-05-01') TO ('2025-06-01'); +CREATE TABLE past_games_2025_06 PARTITION OF past_games + FOR VALUES FROM ('2025-06-01') TO 
('2025-07-01'); +CREATE TABLE past_games_2025_07 PARTITION OF past_games + FOR VALUES FROM ('2025-07-01') TO ('2025-08-01'); +CREATE TABLE past_games_2025_08 PARTITION OF past_games + FOR VALUES FROM ('2025-08-01') TO ('2025-09-01'); +CREATE TABLE past_games_2025_09 PARTITION OF past_games + FOR VALUES FROM ('2025-09-01') TO ('2025-10-01'); +CREATE TABLE past_games_2025_10 PARTITION OF past_games + FOR VALUES FROM ('2025-10-01') TO ('2025-11-01'); +CREATE TABLE past_games_2025_11 PARTITION OF past_games + FOR VALUES FROM ('2025-11-01') TO ('2025-12-01'); +CREATE TABLE past_games_2025_12 PARTITION OF past_games + FOR VALUES FROM ('2025-12-01') TO ('2026-01-01'); + +-- 2026 partitions (Jan-Jun for future games) +CREATE TABLE past_games_2026_01 PARTITION OF past_games + FOR VALUES FROM ('2026-01-01') TO ('2026-02-01'); +CREATE TABLE past_games_2026_02 PARTITION OF past_games + FOR VALUES FROM ('2026-02-01') TO ('2026-03-01'); +CREATE TABLE past_games_2026_03 PARTITION OF past_games + FOR VALUES FROM ('2026-03-01') TO ('2026-04-01'); +CREATE TABLE past_games_2026_04 PARTITION OF past_games + FOR VALUES FROM ('2026-04-01') TO ('2026-05-01'); +CREATE TABLE past_games_2026_05 PARTITION OF past_games + FOR VALUES FROM ('2026-05-01') TO ('2026-06-01'); +CREATE TABLE past_games_2026_06 PARTITION OF past_games + FOR VALUES FROM ('2026-06-01') TO ('2026-07-01'); + +COMMIT; \ No newline at end of file diff --git a/db/migrations/202508291508_optimize_rematch_streaks.down.sql b/db/migrations/202508291508_optimize_rematch_streaks.down.sql new file mode 100644 index 000000000..a8c6254b7 --- /dev/null +++ b/db/migrations/202508291508_optimize_rematch_streaks.down.sql @@ -0,0 +1,14 @@ +BEGIN; + +-- Recreate the original rematch indexes +-- CREATE INDEX idx_past_games_rematch_req_idx +-- ON public.past_games USING hash (((quickdata ->> 'o'::text))); +-- CREATE INDEX rematch_req_idx ON public.games USING hash (((quickdata ->> 'o'::text))); + +-- Drop the game_players index +DROP INDEX 
IF EXISTS idx_game_players_orig_req; + +-- Remove the original_request_id column from game_players +ALTER TABLE game_players DROP COLUMN IF EXISTS original_request_id; + +COMMIT; \ No newline at end of file diff --git a/db/migrations/202508291508_optimize_rematch_streaks.up.sql b/db/migrations/202508291508_optimize_rematch_streaks.up.sql new file mode 100644 index 000000000..7d79b7b27 --- /dev/null +++ b/db/migrations/202508291508_optimize_rematch_streaks.up.sql @@ -0,0 +1,14 @@ +BEGIN; + +-- Add original_request_id column to game_players table for efficient rematch streak queries +ALTER TABLE game_players ADD COLUMN original_request_id text; + +-- Create index for efficient lookups by original_request_id +CREATE INDEX idx_game_players_orig_req ON game_players(original_request_id); + +-- Drop the old rematch indexes since we'll query game_players instead of past_games/games +-- XXX: Drop these later, once we've completed the full migration. +-- DROP INDEX IF EXISTS idx_past_games_rematch_req_idx; +-- DROP INDEX IF EXISTS rematch_req_idx; + +COMMIT; \ No newline at end of file diff --git a/db/migrations/20250906140000_create_game_metadata_table.down.sql b/db/migrations/20250906140000_create_game_metadata_table.down.sql new file mode 100644 index 000000000..5cf008c1c --- /dev/null +++ b/db/migrations/20250906140000_create_game_metadata_table.down.sql @@ -0,0 +1,4 @@ +-- Drop game_metadata table +DROP INDEX IF EXISTS idx_game_metadata_tournament; +DROP INDEX IF EXISTS idx_game_metadata_created_at; +DROP TABLE IF EXISTS game_metadata; \ No newline at end of file diff --git a/db/migrations/20250906140000_create_game_metadata_table.up.sql b/db/migrations/20250906140000_create_game_metadata_table.up.sql new file mode 100644 index 000000000..d38ba3191 --- /dev/null +++ b/db/migrations/20250906140000_create_game_metadata_table.up.sql @@ -0,0 +1,21 @@ +-- Create game_metadata table to store essential game information for completed games +-- This table stays unpartitioned for 
fast queries and is never archived +CREATE TABLE IF NOT EXISTS game_metadata ( + game_uuid TEXT PRIMARY KEY, + created_at TIMESTAMPTZ NOT NULL, + + -- Full GameRequest as JSONB (protojson format) + -- Contains: lexicon, rules, time settings, etc. + game_request JSONB NOT NULL, + + -- Tournament info (moved from past_games for fast access) + tournament_data JSONB DEFAULT NULL, + + -- Mirror of created_at for ordering; a plain DEFAULT cannot reference another column + created_at_idx TIMESTAMPTZ GENERATED ALWAYS AS (created_at) STORED +); + +-- Create indexes for common queries +CREATE INDEX IF NOT EXISTS idx_game_metadata_created_at ON game_metadata (created_at DESC); +CREATE INDEX IF NOT EXISTS idx_game_metadata_tournament ON game_metadata + USING GIN ((tournament_data->'Id')) WHERE tournament_data IS NOT NULL; \ No newline at end of file diff --git a/db/migrations/20250906140001_remove_metadata_from_past_games.down.sql b/db/migrations/20250906140001_remove_metadata_from_past_games.down.sql new file mode 100644 index 000000000..8e1e0a371 --- /dev/null +++ b/db/migrations/20250906140001_remove_metadata_from_past_games.down.sql @@ -0,0 +1,4 @@ +-- Restore game_request and tournament_data columns to past_games +-- Note: This will lose data if already migrated to game_metadata +ALTER TABLE past_games ADD COLUMN IF NOT EXISTS game_request JSONB NOT NULL DEFAULT '{}'; +ALTER TABLE past_games ADD COLUMN IF NOT EXISTS tournament_data JSONB; \ No newline at end of file diff --git a/db/migrations/20250906140001_remove_metadata_from_past_games.up.sql b/db/migrations/20250906140001_remove_metadata_from_past_games.up.sql new file mode 100644 index 000000000..4b2b770e5 --- /dev/null +++ b/db/migrations/20250906140001_remove_metadata_from_past_games.up.sql @@ -0,0 +1,4 @@ +-- Remove game_request and tournament_data from past_games table +-- These are moved to game_metadata for better query performance +ALTER TABLE past_games DROP COLUMN IF EXISTS game_request; +ALTER TABLE past_games DROP COLUMN IF EXISTS tournament_data; \ No newline at end of file diff --git 
a/db/queries/games.sql b/db/queries/games.sql index 6239f1c1b..8c1c15e9f 100644 --- a/db/queries/games.sql +++ b/db/queries/games.sql @@ -1,11 +1,245 @@ --- name: GetGame :one -SELECT * FROM games WHERE uuid = @uuid; -- this is not even a uuid, sigh. +-- name: GetGameBasicInfo :one +SELECT id, uuid, game_end_reason, migration_status, created_at, updated_at, type +FROM games WHERE uuid = @uuid; + +-- name: GetGameFullData :one +SELECT * FROM games WHERE uuid = @uuid; -- name: GetGameOwner :one -SELECT +SELECT agm.creator_uuid, - u.username + u.username FROM annotated_game_metadata agm JOIN users u ON agm.creator_uuid = u.uuid WHERE agm.game_uuid = @game_uuid; +-- name: GetPastGame :one +SELECT * FROM past_games WHERE gid = @gid AND created_at = @created_at; + +-- name: GetLiveGameMetadata :one +SELECT uuid, quickdata, game_end_reason, winner_idx, request, created_at, updated_at, + tournament_data, tournament_id, type +FROM games +WHERE uuid = @uuid; + +-- name: GetPastGameMetadata :one +SELECT pg.game_end_reason, pg.winner_idx, gm.game_request, pg.quickdata, pg.type, gm.tournament_data +FROM past_games pg +JOIN game_metadata gm ON gm.game_uuid = pg.gid +WHERE pg.gid = @gid AND pg.created_at = @created_at; + +-- name: GetRematchStreak :many +SELECT DISTINCT game_uuid as gid, + CASE WHEN won = true THEN player_index + WHEN won = false THEN (1 - player_index) + ELSE -1 END as winner_idx, + created_at +FROM game_players +WHERE original_request_id = @orig_req_id::text + AND game_end_reason <> 5 -- no aborted games + -- note that cancelled games aren't saved in this table + -- and neither are ongoing games. 
+ORDER BY created_at DESC; + +-- name: GetRematchStreakOld :many +-- Backward-compatible query that reads from games table instead of game_players +SELECT DISTINCT uuid as gid, + winner_idx, + created_at +FROM games +WHERE quickdata->>'o' = @orig_req_id::text + AND game_end_reason <> 5 -- no aborted games + AND game_end_reason <> 7 -- no cancelled games (7 = CANCELLED, matching CancelledGamesCleanup) + AND game_end_reason > 0 -- only ended games +ORDER BY created_at DESC; + +-- name: CreateGame :exec +INSERT INTO games ( + created_at, updated_at, uuid, player0_id, player1_id, timers, + started, game_end_reason, winner_idx, loser_idx, request, + history, stats, quickdata, tournament_data, tournament_id, + ready_flag, meta_events, type) +VALUES ( + @created_at, @updated_at, @uuid, @player0_id, @player1_id, @timers, + @started, @game_end_reason, @winner_idx, @loser_idx, @request, + @history, @stats, @quickdata, @tournament_data, @tournament_id, + @ready_flag, @meta_events, @type) +RETURNING id; + +-- name: UpdateGame :exec +UPDATE games +SET updated_at = @updated_at, + player0_id = @player0_id, + player1_id = @player1_id, + timers = @timers, + started = @started, + game_end_reason = @game_end_reason, + winner_idx = @winner_idx, + loser_idx = @loser_idx, + request = @request, + history = @history, + stats = @stats, + quickdata = @quickdata, + tournament_data = @tournament_data, + tournament_id = @tournament_id, + ready_flag = @ready_flag, + meta_events = @meta_events +WHERE uuid = @uuid; + + +-- name: CreateRawGame :exec +INSERT INTO games(uuid, request, history, quickdata, timers, + game_end_reason, type) +VALUES(@uuid, @request, @history, @quickdata, @timers, + @game_end_reason, @type); + +-- name: ListActiveGames :many +SELECT quickdata, request, uuid, started, tournament_data +FROM games +WHERE game_end_reason = 0; + +-- name: ListActiveTournamentGames :many +SELECT quickdata, request, uuid, started, tournament_data +FROM games +WHERE game_end_reason = 0 +AND tournament_id = @tournament_id; + +-- name: SetReady 
:one +UPDATE games SET ready_flag = ready_flag | (1 << @player_idx::integer) +WHERE uuid = @uuid +RETURNING ready_flag; + +-- name: ListAllIDs :many +SELECT uuid FROM games +ORDER BY created_at ASC; + +-- name: GetHistory :one +SELECT history FROM games +WHERE uuid = @uuid; + +-- name: InsertPastGame :exec +INSERT INTO past_games ( + gid, created_at, game_end_reason, winner_idx, + game_document, stats, quickdata, type +) VALUES ( + @gid, @created_at, @game_end_reason, @winner_idx, + @game_document, @stats, @quickdata, @type +); + +-- name: InsertGamePlayer :exec +INSERT INTO game_players ( + game_uuid, player_id, player_index, score, won, game_end_reason, + rating_before, rating_after, rating_delta, created_at, game_type, + opponent_id, opponent_score, original_request_id +) VALUES ( + @game_uuid, @player_id, @player_index, @score, @won, @game_end_reason, + @rating_before, @rating_after, @rating_delta, @created_at, @game_type, + @opponent_id, @opponent_score, @original_request_id +); + +-- name: UpdateGameMigrationStatus :exec +UPDATE games +SET migration_status = @migration_status, + updated_at = NOW() +WHERE uuid = @uuid; + +-- name: InsertGameMetadata :exec +INSERT INTO game_metadata ( + game_uuid, created_at, game_request, tournament_data +) VALUES ( + @game_uuid, @created_at, @game_request, @tournament_data +); + +-- name: GetGameMetadata :one +SELECT game_uuid, created_at, game_request, tournament_data +FROM game_metadata +WHERE game_uuid = @game_uuid; + +-- name: ClearGameDataAfterMigration :exec +UPDATE games +SET history = NULL, + stats = NULL, + quickdata = NULL, + timers = NULL, + meta_events = NULL, + request = NULL, + tournament_data = NULL, + player0_id = NULL, + player1_id = NULL, + updated_at = NOW() +WHERE uuid = @uuid; + +-- name: GetGamePlayers :many +SELECT player_id, player_index +FROM game_players +WHERE game_uuid = @game_uuid +ORDER BY player_index; + +-- name: GetRecentGamesByUsername :many +SELECT gp.game_uuid, gp.score, gp.opponent_score, 
gp.won, gp.game_end_reason,
+       gp.created_at, gp.game_type, u.username as opponent_username,
+       COALESCE(pg.quickdata, '{}') as quickdata,
+       gm.game_request,
+       gm.tournament_data,
+       COALESCE(pg.winner_idx, CASE WHEN gp.won = true THEN gp.player_index
+                WHEN gp.won = false THEN (1 - gp.player_index)
+                ELSE -1 END) as winner_idx
+FROM game_players gp
+JOIN users u ON u.id = gp.opponent_id
+JOIN users player ON player.id = gp.player_id
+JOIN game_metadata gm ON gm.game_uuid = gp.game_uuid
+LEFT JOIN past_games pg ON pg.gid = gp.game_uuid
+WHERE LOWER(player.username) = LOWER(@username)
+ORDER BY gp.created_at DESC
+LIMIT @num_games OFFSET @offset_games;
+
+-- name: GetRecentGamesByUsernameOld :many
+-- Backward-compatible query that reads from games table
+-- NOTE: username comparisons use LOWER() throughout so the projected
+-- CASE branches agree with the case-insensitive WHERE clause; otherwise
+-- a differently-cased @username matches the row but falls through to the
+-- ELSE branches, swapping score/opponent_score and mis-reporting won.
+SELECT g.uuid as game_uuid,
+       CASE WHEN LOWER(u1.username) = LOWER(@username) THEN (g.quickdata->'finalScores'->>0)::int
+            ELSE (g.quickdata->'finalScores'->>1)::int END as score,
+       CASE WHEN LOWER(u1.username) = LOWER(@username) THEN (g.quickdata->'finalScores'->>1)::int
+            ELSE (g.quickdata->'finalScores'->>0)::int END as opponent_score,
+       CASE WHEN g.winner_idx = 0 AND LOWER(u1.username) = LOWER(@username) THEN true
+            WHEN g.winner_idx = 1 AND LOWER(u2.username) = LOWER(@username) THEN true
+            WHEN g.winner_idx = -1 THEN NULL
+            ELSE false END as won,
+       g.game_end_reason,
+       g.created_at,
+       g.type as game_type,
+       CASE WHEN LOWER(u1.username) = LOWER(@username) THEN u2.username
+            ELSE u1.username END as opponent_username,
+       g.quickdata,
+       g.request as game_request,
+       g.winner_idx
+FROM games g
+LEFT JOIN users u1 ON g.player0_id = u1.id
+LEFT JOIN users u2 ON g.player1_id = u2.id
+WHERE (LOWER(u1.username) = LOWER(@username) OR LOWER(u2.username) = LOWER(@username))
+  AND g.game_end_reason > 0 -- only ended games
+ORDER BY g.created_at DESC
+LIMIT @num_games OFFSET @offset_games;
+
+-- name: GetRecentTourneyGames :many
+SELECT pg.gid, pg.quickdata, gm.game_request, pg.winner_idx, pg.game_end_reason,
+       pg.created_at, pg.type, gm.tournament_data
+FROM past_games pg
+JOIN game_metadata gm ON
gm.game_uuid = pg.gid +WHERE gm.tournament_data->>'Id' = @tourney_id::text +ORDER BY pg.created_at DESC +LIMIT @num_games OFFSET @offset_games; + +-- name: GetRecentTourneyGamesOld :many +-- Backward-compatible query that reads from games table +SELECT g.uuid as gid, g.quickdata, g.request as game_request, g.winner_idx, g.game_end_reason, + g.created_at, g.type, g.tournament_data +FROM games g +WHERE g.tournament_id = @tourney_id::text + AND g.game_end_reason > 0 -- only ended games +ORDER BY g.created_at DESC +LIMIT @num_games OFFSET @offset_games; + +-- name: GameExists :one +SELECT EXISTS ( + SELECT 1 FROM games WHERE uuid = @uuid +) AS exists; \ No newline at end of file diff --git a/db/queries/puzzles.sql b/db/queries/puzzles.sql index c0ac85896..6b35d4c8d 100644 --- a/db/queries/puzzles.sql +++ b/db/queries/puzzles.sql @@ -1,41 +1,43 @@ -- puzzle generation -- name: GetPotentialPuzzleGamesAvoidBots :many -SELECT games.uuid FROM games -LEFT JOIN puzzles on puzzles.game_id = games.id +SELECT past_games.gid FROM past_games +JOIN games ON past_games.gid = games.uuid +JOIN game_metadata ON game_metadata.game_uuid = past_games.gid +LEFT JOIN puzzles ON puzzles.game_id = games.id WHERE puzzles.id IS NULL - AND games.created_at BETWEEN $1 AND $2 - AND (stats->'d1'->'Challenged Phonies'->'t' = '0') - AND (stats->'d2'->'Challenged Phonies'->'t' = '0') - AND (stats->'d1'->'Unchallenged Phonies'->'t' = '0') - AND (stats->'d2'->'Unchallenged Phonies'->'t' = '0') - AND games.request LIKE $3 -- %lexicon% - AND games.request NOT LIKE '%classic_super%' - AND games.request NOT LIKE '%wordsmog%' + AND past_games.created_at BETWEEN $1 AND $2 + AND (past_games.stats->'d1'->'Challenged Phonies'->'t' = '0') + AND (past_games.stats->'d2'->'Challenged Phonies'->'t' = '0') + AND (past_games.stats->'d1'->'Unchallenged Phonies'->'t' = '0') + AND (past_games.stats->'d2'->'Unchallenged Phonies'->'t' = '0') + AND game_metadata.game_request->>'lexicon' = $3::text + AND 
game_metadata.game_request->'rules'->>'variantName' = 'classic' -- 0: none, 5: aborted, 7: canceled - AND game_end_reason not in (0, 5, 7) - AND NOT (quickdata @> '{"pi": [{"is_bot": true}]}'::jsonb) - AND type = 0 + AND past_games.game_end_reason NOT IN (0, 5, 7) + AND NOT (past_games.quickdata @> '{"pi": [{"is_bot": true}]}'::jsonb) + AND past_games.type = 0 ORDER BY games.id DESC LIMIT $4 OFFSET $5; -- name: GetPotentialPuzzleGames :many -SELECT games.uuid FROM games -LEFT JOIN puzzles on puzzles.game_id = games.id +SELECT past_games.gid FROM past_games +JOIN games ON past_games.gid = games.uuid +JOIN game_metadata ON game_metadata.game_uuid = past_games.gid +LEFT JOIN puzzles ON puzzles.game_id = games.id WHERE puzzles.id IS NULL - AND games.created_at BETWEEN $1 AND $2 - AND (stats->'d1'->'Challenged Phonies'->'t' = '0') - AND (stats->'d2'->'Challenged Phonies'->'t' = '0') - AND (stats->'d1'->'Unchallenged Phonies'->'t' = '0') - AND (stats->'d2'->'Unchallenged Phonies'->'t' = '0') - AND games.request LIKE $3 -- %lexicon% - AND games.request NOT LIKE '%classic_super%' - AND games.request NOT LIKE '%wordsmog%' + AND past_games.created_at BETWEEN $1 AND $2 + AND (past_games.stats->'d1'->'Challenged Phonies'->'t' = '0') + AND (past_games.stats->'d2'->'Challenged Phonies'->'t' = '0') + AND (past_games.stats->'d1'->'Unchallenged Phonies'->'t' = '0') + AND (past_games.stats->'d2'->'Unchallenged Phonies'->'t' = '0') + AND game_metadata.game_request->>'lexicon' = $3::text + AND game_metadata.game_request->'rules'->>'variantName' = 'classic' -- 0: none, 5: aborted, 7: canceled - AND game_end_reason not in (0, 5, 7) - AND type = 0 + AND past_games.game_end_reason NOT IN (0, 5, 7) + AND past_games.type = 0 ORDER BY games.id DESC LIMIT $4 OFFSET $5; \ No newline at end of file diff --git a/pkg/bus/bus.go b/pkg/bus/bus.go index d994dfed4..4cea5eb8e 100644 --- a/pkg/bus/bus.go +++ b/pkg/bus/bus.go @@ -531,6 +531,8 @@ func (b *Bus) handleNatsPublish(ctx context.Context, 
subtopics []string, data [] if err == nil { msgType = pb.MessageType(pnum).String() } + log.Debug().Str("msgType", msgType).Str("auth", auth).Str("userID", userID).Str("wsConnID", wsConnID). + Msg("handling-nats-publish") // XXX: Otherwise, ignore error for now. switch msgType { diff --git a/pkg/bus/gameplay.go b/pkg/bus/gameplay.go index b72d29db2..e65350129 100644 --- a/pkg/bus/gameplay.go +++ b/pkg/bus/gameplay.go @@ -243,12 +243,15 @@ func (b *Bus) readyForGame(ctx context.Context, evt *pb.ReadyForGame, userID str if rf == (1<3 months old) - 3-5 second access time acceptable +3. **Analytics**: Athena - Complex historical queries + +## Implementation Strategy + +### 1. Enhanced Migration Status + +```go +const ( + MigrationStatusNotMigrated = 0 // Legacy games not yet migrated + MigrationStatusMigrated = 1 // In past_games table + MigrationStatusCleaned = 2 // Data cleared from games table + MigrationStatusArchived = 3 // Partition moved to S3 +) +``` + +### 2. S3 Storage Structure + +``` +s3://liwords-game-archive/ +├── partitions/ +│ ├── year=2024/ +│ │ ├── month=01/ +│ │ │ ├── partition_metadata.json +│ │ │ ├── games_2024_01.parquet.gz +│ │ │ └── checksums.sha256 +│ │ ├── month=02/ +│ │ └── ... +│ └── year=2025/ +├── athena_schemas/ +│ └── game_partitions.sql +└── backup/ + └── redundant_copies/ +``` + +#### Partition Metadata Format +```json +{ + "partition_name": "past_games_2024_01", + "year": 2024, + "month": 1, + "game_count": 45230, + "date_range": { + "start": "2024-01-01T00:00:00Z", + "end": "2024-01-31T23:59:59Z" + }, + "file_info": { + "original_size_mb": 2340, + "compressed_size_mb": 234, + "compression_ratio": 0.1, + "checksum": "sha256:abc123..." + }, + "archived_at": "2024-04-15T10:30:00Z", + "schema_version": "v2" +} +``` + +### 3. 
Partition Archival Process + +```go +type PartitionArchiver struct { + db *pgxpool.Pool + s3 *s3.Client + config *config.Config +} + +func (pa *PartitionArchiver) ArchivePartition(ctx context.Context, partitionName string) error { + // 1. Validate partition is old enough (>3 months) + if err := pa.validatePartitionAge(partitionName); err != nil { + return fmt.Errorf("partition validation failed: %w", err) + } + + // 2. Create backup transaction + tx, err := pa.db.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + return err + } + defer tx.Rollback(ctx) + + // 3. DETACH partition (makes it independent table) + _, err = tx.Exec(ctx, fmt.Sprintf("ALTER TABLE past_games DETACH PARTITION %s", partitionName)) + if err != nil { + return fmt.Errorf("failed to detach partition: %w", err) + } + + // 4. Export to Parquet format (Athena compatible) + parquetData, metadata, err := pa.exportPartitionToParquet(ctx, tx, partitionName) + if err != nil { + return fmt.Errorf("failed to export partition: %w", err) + } + + // 5. Upload to S3 with compression and redundancy + s3Key := fmt.Sprintf("partitions/year=%d/month=%d/games_%s.parquet.gz", + metadata.Year, metadata.Month, partitionName) + if err := pa.uploadToS3WithRetry(ctx, s3Key, parquetData, metadata); err != nil { + return fmt.Errorf("failed to upload to S3: %w", err) + } + + // 6. Verify S3 upload integrity + if err := pa.verifyS3Upload(ctx, s3Key, metadata.Checksum); err != nil { + return fmt.Errorf("S3 upload verification failed: %w", err) + } + + // 7. Update migration status for all games in partition + _, err = tx.Exec(ctx, ` + UPDATE games + SET migration_status = $1, updated_at = NOW() + WHERE created_at >= $2 AND created_at < $3`, + MigrationStatusArchived, metadata.DateRange.Start, metadata.DateRange.End) + if err != nil { + return fmt.Errorf("failed to update migration status: %w", err) + } + + // 8. 
Commit transaction + if err := tx.Commit(ctx); err != nil { + return fmt.Errorf("failed to commit transaction: %w", err) + } + + // 9. DROP the detached partition (point of no return) + _, err = pa.db.Exec(ctx, fmt.Sprintf("DROP TABLE %s", partitionName)) + if err != nil { + log.Error().Err(err).Str("partition", partitionName). + Msg("Failed to drop partition after successful archival - manual cleanup needed") + // Don't return error - archival was successful + } + + log.Info().Str("partition", partitionName).Int("games", metadata.GameCount). + Float64("compression_ratio", metadata.FileInfo.CompressionRatio). + Msg("Partition successfully archived to S3") + + return nil +} +``` + +### 4. Game Retrieval System + +#### Enhanced Get Method +```go +func (s *DBStore) Get(ctx context.Context, id string) (*entity.Game, error) { + // 1. Always get basic info from games table first + basicInfo, err := s.queries.GetBasicGameInfo(ctx, common.ToPGTypeText(id)) + if err != nil { + return nil, fmt.Errorf("game not found: %w", err) + } + + // 2. Route based on migration status + switch basicInfo.MigrationStatus { + case MigrationStatusNotMigrated, MigrationStatusCleaned: + // Legacy path - still in games table + return s.inProgressGame(basicInfo, true) + + case MigrationStatusMigrated: + // Fast path - in past_games table + return s.getFromPastGames(ctx, basicInfo, true) + + case MigrationStatusArchived: + // Slow path - in S3 archive + return s.getFromS3Archive(ctx, basicInfo) + + default: + return nil, fmt.Errorf("unknown migration status: %d", basicInfo.MigrationStatus) + } +} +``` + +#### S3 Retrieval Implementation +```go +func (s *DBStore) getFromS3Archive(ctx context.Context, basicInfo BasicGameInfo) (*entity.Game, error) { + // 1. Check Redis cache first + if cachedGame := s.getCachedS3Game(basicInfo.UUID); cachedGame != nil { + return cachedGame, nil + } + + // 2. 
Calculate S3 partition path + year, month := basicInfo.CreatedAt.Year(), basicInfo.CreatedAt.Month() + s3Key := fmt.Sprintf("partitions/year=%d/month=%d/games_%d_%02d.parquet.gz", + year, month, year, month) + + // 3. Try Athena query first (faster for single game) + gameDoc, err := s.queryAthenaForGame(ctx, basicInfo.UUID, year, month) + if err != nil { + log.Warn().Err(err).Str("game", basicInfo.UUID). + Msg("Athena query failed, falling back to direct S3") + + // 4. Fallback: Download and scan S3 partition + gameDoc, err = s.scanS3PartitionForGame(ctx, s3Key, basicInfo.UUID) + if err != nil { + return nil, fmt.Errorf("failed to retrieve game from S3: %w", err) + } + } + + // 5. Convert GameDocument back to entity.Game + entGame, err := s.gameDocumentToEntityGame(gameDoc, basicInfo) + if err != nil { + return nil, fmt.Errorf("failed to convert game document: %w", err) + } + + // 6. Cache in Redis for future requests + s.cacheS3Game(basicInfo.UUID, entGame, 1*time.Hour) + + return entGame, nil +} +``` + +### 5. 
Athena Integration + +#### Table Creation +```sql +-- Create external table for Athena queries +CREATE EXTERNAL TABLE game_archive ( + gid string, + created_at timestamp, + game_end_reason smallint, + winner_idx smallint, + game_request string, + game_document string, + stats string, + quickdata string, + tournament_data string +) +PARTITIONED BY ( + year int, + month int +) +STORED AS PARQUET +LOCATION 's3://liwords-game-archive/partitions/' +TBLPROPERTIES ( + 'projection.enabled' = 'true', + 'projection.year.type' = 'integer', + 'projection.year.range' = '2020,2030', + 'projection.month.type' = 'integer', + 'projection.month.range' = '1,12', + 'projection.year.interval' = '1', + 'projection.month.interval' = '1' +); +``` + +#### Athena Query Service +```go +type AthenaQuerier struct { + client *athena.Client + bucket string + database string +} + +func (aq *AthenaQuerier) GetGameFromArchive(ctx context.Context, gameID string, year, month int) (*pb.GameDocument, error) { + query := fmt.Sprintf(` + SELECT game_document + FROM game_archive + WHERE gid = '%s' + AND year = %d + AND month = %d + LIMIT 1 + `, gameID, year, month) + + result, err := aq.executeQuery(ctx, query) + if err != nil { + return nil, err + } + + if len(result.Rows) == 0 { + return nil, fmt.Errorf("game not found in archive") + } + + var gameDoc pb.GameDocument + if err := protojson.Unmarshal([]byte(result.Rows[0]["game_document"]), &gameDoc); err != nil { + return nil, fmt.Errorf("failed to unmarshal game document: %w", err) + } + + return &gameDoc, nil +} +``` + +### 6. 
Robustness & Error Handling + +#### Graceful Degradation +```go +func (s *DBStore) GetWithFallback(ctx context.Context, id string) (*entity.Game, error) { + game, err := s.Get(ctx, id) + if err != nil { + // If S3 retrieval fails, return basic metadata with error indication + basicInfo, basicErr := s.queries.GetBasicGameInfo(ctx, common.ToPGTypeText(id)) + if basicErr != nil { + return nil, fmt.Errorf("game not found: %w", basicErr) + } + + // Return skeleton game with error state + return &entity.Game{ + CreatedAt: basicInfo.CreatedAt, + Type: pb.GameType(basicInfo.Type), + // Set error flag for UI to show "Game temporarily unavailable" + GameEndReason: pb.GameEndReason_TEMPORARILY_UNAVAILABLE, + }, nil + } + return game, nil +} +``` + +#### Cache Strategy +```go +type S3GameCache struct { + redis *redis.Client +} + +func (c *S3GameCache) Get(gameID string) (*entity.Game, error) { + key := fmt.Sprintf("s3game:%s", gameID) + data, err := c.redis.Get(key).Bytes() + if err != nil { + return nil, err + } + + var game entity.Game + if err := proto.Unmarshal(data, &game); err != nil { + return nil, err + } + + return &game, nil +} + +func (c *S3GameCache) Set(gameID string, game *entity.Game, ttl time.Duration) error { + key := fmt.Sprintf("s3game:%s", gameID) + data, err := proto.Marshal(game) + if err != nil { + return err + } + + return c.redis.Set(key, data, ttl).Err() +} +``` + +### 7. 
Monitoring & Observability + +#### Key Metrics +```go +type ArchivalMetrics struct { + PartitionsArchived prometheus.Counter + S3RetrievalLatency prometheus.Histogram + S3RetrievalErrors prometheus.Counter + CacheHitRate prometheus.Gauge + AthenaQueryLatency prometheus.Histogram + ArchivalDuration prometheus.Histogram +} + +func (m *ArchivalMetrics) RecordS3Retrieval(duration time.Duration, success bool) { + m.S3RetrievalLatency.Observe(duration.Seconds()) + if !success { + m.S3RetrievalErrors.Inc() + } +} +``` + +#### Health Checks +```go +func (s *DBStore) HealthCheckS3Archive(ctx context.Context) error { + // Test retrieval of a known archived game + testGameID := "test-game-archive-check" + + start := time.Now() + _, err := s.getFromS3Archive(ctx, BasicGameInfo{ + UUID: testGameID, + CreatedAt: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC), + MigrationStatus: MigrationStatusArchived, + }) + + duration := time.Since(start) + if duration > 10*time.Second { + return fmt.Errorf("S3 retrieval too slow: %v", duration) + } + + return err +} +``` + +### 8. 
Batch Operations + +#### Efficient Multiple Game Retrieval +```go +func (s *DBStore) GetMultipleGamesFromS3(ctx context.Context, gameIDs []string) (map[string]*entity.Game, error) { + // Group games by S3 partition to minimize downloads + partitionGroups := make(map[string][]string) + + for _, gameID := range gameIDs { + basicInfo, err := s.queries.GetBasicGameInfo(ctx, common.ToPGTypeText(gameID)) + if err != nil { + continue + } + + if basicInfo.MigrationStatus == MigrationStatusArchived { + partitionKey := fmt.Sprintf("%d_%02d", + basicInfo.CreatedAt.Year(), basicInfo.CreatedAt.Month()) + partitionGroups[partitionKey] = append(partitionGroups[partitionKey], gameID) + } + } + + results := make(map[string]*entity.Game) + + // Process each partition once + for partitionKey, gameIDsInPartition := range partitionGroups { + games, err := s.getMultipleGamesFromPartition(ctx, partitionKey, gameIDsInPartition) + if err != nil { + log.Error().Err(err).Str("partition", partitionKey). + Msg("Failed to retrieve games from partition") + continue + } + + for gameID, game := range games { + results[gameID] = game + } + } + + return results, nil +} +``` + +### 9. Migration Timeline & Process + +#### Phase 2A: Infrastructure Setup (Month 1) +1. Implement S3 archival infrastructure +2. Create Athena table definitions +3. Set up monitoring and alerting +4. Build cache layer +5. Implement graceful degradation + +#### Phase 2B: Testing (Month 2) +1. Archive oldest partitions in test environment +2. Validate data integrity +3. Performance testing +4. Load testing with S3 retrieval +5. Cache effectiveness analysis + +#### Phase 2C: Production Rollout (Month 3) +1. Archive partitions >6 months old (very safe) +2. Monitor for 2 weeks +3. Archive partitions >4 months old +4. Monitor for 2 weeks +5. Achieve steady state: Archive partitions >3 months old + +#### Phase 2D: Automation (Month 4) +1. Automated monthly archival process +2. Automated monitoring and alerting +3. 
Self-healing capabilities +4. Performance optimization based on usage patterns + +### 10. Expected Outcomes + +#### Storage Savings +- Database size reduction: **85-90%** +- From ~40GB to ~4-6GB active data +- S3 storage cost: ~10% of database storage cost + +#### Performance Impact +- 99%+ queries remain fast (recent games) +- 1% queries (old games) take 3-5 seconds +- Cache can reduce repeated old game access to <1 second + +#### Operational Benefits +- Faster database backups and maintenance +- Better query performance on active data +- Historical analytics via Athena +- Significant cost reduction + +## Implementation Checklist + +### Prerequisites +- [ ] Phase 1 stable for 2+ months +- [ ] S3 infrastructure configured +- [ ] Athena setup complete +- [ ] Redis cache available +- [ ] Monitoring infrastructure ready + +### Core Implementation +- [ ] Enhanced migration status constants +- [ ] S3 archival service +- [ ] Partition export to Parquet +- [ ] S3 upload with compression +- [ ] Athena query service +- [ ] Enhanced Get() method with S3 support +- [ ] Cache layer implementation +- [ ] Error handling and graceful degradation + +### Testing & Validation +- [ ] Unit tests for all components +- [ ] Integration tests with S3 +- [ ] Performance benchmarks +- [ ] Data integrity validation +- [ ] Failover scenario testing + +### Monitoring & Operations +- [ ] Metrics collection +- [ ] Alerting setup +- [ ] Health checks +- [ ] Automated archival process +- [ ] Documentation for operations team + +--- + +**Next Steps**: When ready to implement, start with the infrastructure components and build incrementally, testing each component thoroughly before moving to the next. \ No newline at end of file diff --git a/pkg/stores/game/README.md b/pkg/stores/game/README.md new file mode 100644 index 000000000..f714b3cdc --- /dev/null +++ b/pkg/stores/game/README.md @@ -0,0 +1,114 @@ +May 7, 2025 + +### Evolution of game store + +Our game store is a model in the database. 
DDL: + +```sql +-- public.games definition +CREATE TABLE public.games ( + id serial4 NOT NULL, + created_at timestamptz NULL, + updated_at timestamptz NULL, + deleted_at timestamptz NULL, + "uuid" varchar(24) NULL, + player0_id int4 NULL, + player1_id int4 NULL, + timers jsonb NULL, + started bool NULL, + game_end_reason int4 NULL, + winner_idx int4 NULL, + loser_idx int4 NULL, + request bytea NULL, + history bytea NULL, + stats jsonb NULL, + quickdata jsonb NULL, + tournament_data jsonb NULL, + tournament_id text NULL, + ready_flag int8 NULL, + meta_events jsonb NULL, + "type" int4 NULL, + game_request jsonb DEFAULT '{}'::jsonb NOT NULL, + history_in_s3 bool DEFAULT false NOT NULL, + CONSTRAINT games_pkey PRIMARY KEY (id) +); +CREATE INDEX hastybot_games_index ON public.games USING btree (id) WHERE ((game_end_reason <> ALL ('{0,5,7}'::integer[])) AND ((player0_id = 230) OR (player1_id = 230))); +CREATE INDEX idx_game_creation_date ON public.games USING btree (created_at); +CREATE INDEX idx_games_deleted_at ON public.games USING btree (deleted_at); +CREATE INDEX idx_games_game_end_reason ON public.games USING btree (game_end_reason); +CREATE INDEX idx_games_player0_id ON public.games USING btree (player0_id); +CREATE INDEX idx_games_player1_id ON public.games USING btree (player1_id); +CREATE INDEX idx_games_tournament_id ON public.games USING btree (tournament_id); +CREATE INDEX idx_games_uuid ON public.games USING btree (uuid); +CREATE INDEX rematch_req_idx ON public.games USING hash (((quickdata ->> 'o'::text))); + + +-- public.games foreign keys + +ALTER TABLE public.games ADD CONSTRAINT fk_games_player0 FOREIGN KEY (player0_id) REFERENCES public.users(id); +ALTER TABLE public.games ADD CONSTRAINT fk_games_player1 FOREIGN KEY (player1_id) REFERENCES public.users(id); +``` + +It is (currently) over 8M games and takes up close to 40G in the database. This is +very high for a table where the large majority of rows are forgotten about. 
Yet we still wish to keep the history for old games. Queries on this table are often very slow.
+
+We are going to do a multi-phase migration to another structure.
+
+#### partitioned games table
+
+The original `games` table will be used for ongoing, unfinished games. We will also create a partitioned table for past games:
+
+```sql
+CREATE TABLE past_games (
+    gid text NOT NULL,
+    created_at timestamp with time zone,
+    game_end_reason SMALLINT,
+    winner_idx SMALLINT, -- 0, 1 for first or second, -1 for draw
+    game_request jsonb NOT NULL DEFAULT '{}',
+    game_document jsonb NOT NULL DEFAULT '{}',
+    stats jsonb NOT NULL DEFAULT '{}',
+    quickdata jsonb NOT NULL DEFAULT '{}',
+    tournament_data jsonb -- can be null. contains an Id column
+) PARTITION BY RANGE (created_at);
+
+
+CREATE INDEX idx_past_games_tournament_id ON public.past_games USING hash(((tournament_data ->>'Id'::text)));
+-- index belongs on past_games (games has no gid column; its key is uuid)
+CREATE INDEX idx_past_games_gid ON public.past_games USING btree (gid);
+CREATE INDEX idx_past_games_rematch_req_idx ON public.past_games USING hash (((quickdata ->> 'o'::text)));
+
+```
+
+##### Phase 1:
+
+We will create partitions for every month (based on game `created_at`, in UTC). Upon completion of a game, the relevant data will be copied to a new row in past_games, and then we will delete everything but the most basic metadata from the original game (by setting columns to NULL as needed).
+
+We will also create a new row in the `game_players` table for each player, upon completion of the game. This will allow for historical queries.
+
+
+Basic data to keep in `games`:
+
+- `id`
+- `created_at` (needed for looking up proper partition in future)
+- `uuid` (it is not actually a uuid, but a short string ID)
+- `type`
+
+We should also delete indexes from `games` that we won't need any longer, like the rematch_req_idx (moved to past_games), and so on.
+
+Partitions should be made with a periodic maintenance task.
+ +##### Phase 2: + +We can offload old partitions, > 3 months or so, to S3, with a cron task. + +- DETACH partition +- SELECT entire table, gzip, upload to S3 +- If a user requests a game (any metadata beyond the basic one listed above): + - Look for the `id` in `games` + - If found, look for the game in `past_games` + - If it's not in `past_games`, it's in S3. Use the date to fashion the proper Athena query to fetch the data from this game. + +Of course we have to make the Athena indexes and all of that. + +What does this mean for data? + +- If we have Head-to-Head or other similar stats, we can only calculate the most recent 3 months' worth. This is probably OK. This can improve when we learn how to query the long-term data store. \ No newline at end of file diff --git a/pkg/stores/game/cache.go b/pkg/stores/game/cache.go index 2da132330..47ae2d194 100644 --- a/pkg/stores/game/cache.go +++ b/pkg/stores/game/cache.go @@ -26,12 +26,12 @@ type backingStore interface { CreateRaw(context.Context, *entity.Game, pb.GameType) error Exists(context.Context, string) (bool, error) ListActive(ctx context.Context, tourneyID string) (*pb.GameInfoResponses, error) - Count(ctx context.Context) (int64, error) GameEventChan() chan<- *entity.EventWrapper SetGameEventChan(ch chan<- *entity.EventWrapper) Disconnect() SetReady(ctx context.Context, gid string, pidx int) (int, error) GetHistory(ctx context.Context, id string) (*macondopb.GameHistory, error) + MigrateGameToPastGames(ctx context.Context, g *entity.Game, ratingsBefore, ratingsAfter map[string]int32) error } const ( @@ -217,10 +217,6 @@ func (c *Cache) listAllActive(ctx context.Context) (*pb.GameInfoResponses, error return games, err } -func (c *Cache) Count(ctx context.Context) (int64, error) { - return c.backing.Count(ctx) -} - func (c *Cache) CachedCount(ctx context.Context) int { return c.cache.Len() } @@ -236,3 +232,7 @@ func (c *Cache) SetReady(ctx context.Context, gid string, pidx int) (int, error) func (c *Cache) 
GetHistory(ctx context.Context, id string) (*macondopb.GameHistory, error) { return c.backing.GetHistory(ctx, id) } + +func (c *Cache) MigrateGameToPastGames(ctx context.Context, g *entity.Game, ratingsBefore, ratingsAfter map[string]int32) error { + return c.backing.MigrateGameToPastGames(ctx, g, ratingsBefore, ratingsAfter) +} diff --git a/pkg/stores/game/db.go b/pkg/stores/game/db.go index 5943dcda4..c20874820 100644 --- a/pkg/stores/game/db.go +++ b/pkg/stores/game/db.go @@ -2,26 +2,23 @@ package game import ( "context" - "database/sql" "encoding/json" "errors" "fmt" - "sort" + "os" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" "github.com/jackc/pgx/v5/pgxpool" "github.com/rs/zerolog/log" + "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/timestamppb" - "gorm.io/datatypes" - "gorm.io/driver/postgres" - "gorm.io/gorm" - "gorm.io/gorm/clause" - "gorm.io/plugin/opentelemetry/tracing" - "github.com/woogles-io/liwords/pkg/config" "github.com/woogles-io/liwords/pkg/entity" + "github.com/woogles-io/liwords/pkg/entity/utilities" "github.com/woogles-io/liwords/pkg/stores/common" "github.com/woogles-io/liwords/pkg/stores/models" @@ -32,14 +29,60 @@ import ( pb "github.com/woogles-io/liwords/rpc/api/proto/ipc" ) +// GameRequest utility functions for handling both proto and protojson formats + +// ParseGameRequest parses GameRequest from bytes, trying proto format first, then protojson +func ParseGameRequest(data []byte) (*pb.GameRequest, error) { + if len(data) == 0 { + return &pb.GameRequest{}, nil + } + + gr := &pb.GameRequest{} + + // Try proto format first (binary data from live games) + err := proto.Unmarshal(data, gr) + if err == nil { + return gr, nil + } + + // Fall back to protojson format (from past games) + err = protojson.Unmarshal(data, gr) + if err != nil { + return nil, fmt.Errorf("failed to parse GameRequest as both proto and protojson: %w", err) + } + + return gr, 
nil +} + +// MarshalGameRequestAsProto marshals GameRequest as binary proto for live games table +func MarshalGameRequestAsProto(gr *pb.GameRequest) ([]byte, error) { + if gr == nil { + return nil, fmt.Errorf("GameRequest is nil") + } + return proto.Marshal(gr) +} + +// MarshalGameRequestAsJSON marshals GameRequest as protojson for past games table +func MarshalGameRequestAsJSON(gr *pb.GameRequest) ([]byte, error) { + if gr == nil { + return nil, fmt.Errorf("GameRequest is nil") + } + return protojson.Marshal(gr) +} + const ( MaxRecentGames = 1000 + + // Migration status constants + MigrationStatusNotMigrated = 0 + MigrationStatusMigrated = 1 + MigrationStatusCleaned = 2 ) // DBStore is a postgres-backed store for games. type DBStore struct { cfg *config.Config - db *gorm.DB + dbPool *pgxpool.Pool queries *models.Queries userStore pkguser.Store @@ -48,60 +91,75 @@ type DBStore struct { // from the database. // All game events go down the same channel. gameEventChan chan<- *entity.EventWrapper + + // Feature flag to control whether to use past_games table + // When false, uses old queries against games table + // When true, uses new queries against past_games/game_players tables + usePastGamesTable bool } -type game struct { - gorm.Model - UUID string `gorm:"type:varchar(24);index"` +// type game struct { +// gorm.Model +// UUID string `gorm:"type:varchar(24);index"` - Type pb.GameType - Player0ID uint `gorm:"foreignKey;index"` - // Player0 user.User +// Type pb.GameType +// Player0ID uint `gorm:"foreignKey;index"` +// // Player0 user.User - Player1ID uint `gorm:"foreignKey;index"` - // Player1 user.User +// Player1ID uint `gorm:"foreignKey;index"` +// // Player1 user.User - ReadyFlag uint // When both players are ready, this game starts. +// ReadyFlag uint // When both players are ready, this game starts. - Timers datatypes.JSON // A JSON blob containing the game timers. +// Timers datatypes.JSON // A JSON blob containing the game timers. 
- Started bool - GameEndReason int `gorm:"index"` - WinnerIdx int - LoserIdx int +// Started bool +// GameEndReason int `gorm:"index"` +// WinnerIdx int +// LoserIdx int - Quickdata datatypes.JSON // A JSON blob containing the game quickdata. +// Quickdata datatypes.JSON // A JSON blob containing the game quickdata. - // Protobuf representations of the game request and history. - Request []byte - History []byte - // Meta Events (abort, adjourn, adjudicate, etc requests) - MetaEvents datatypes.JSON +// // Protobuf representations of the game request and history. +// Request []byte +// History []byte +// // Meta Events (abort, adjourn, adjudicate, etc requests) +// MetaEvents datatypes.JSON - Stats datatypes.JSON +// Stats datatypes.JSON - // This is purposefully not a foreign key. It can be empty/NULL for - // most games. - TournamentID string `gorm:"index"` - TournamentData datatypes.JSON -} +// // This is purposefully not a foreign key. It can be empty/NULL for +// // most games. +// TournamentID string `gorm:"index"` +// TournamentData datatypes.JSON +// } // NewDBStore creates a new DB store for games. func NewDBStore(config *config.Config, userStore pkguser.Store, dbPool *pgxpool.Pool) (*DBStore, error) { - - db, err := gorm.Open(postgres.Open(config.DBConnDSN), &gorm.Config{Logger: common.GormLogger}) - if err != nil { - return nil, err - } - if err := db.Use(tracing.NewPlugin()); err != nil { - return nil, err - } // Note: We need to manually add the following index on production: // create index rematch_req_idx ON games using hash ((quickdata->>'o')); - // I don't know how to do this with GORM. This makes the GetRematchStreak function - // much faster. 
- return &DBStore{db: db, cfg: config, userStore: userStore, queries: models.New(dbPool)}, nil + // Check environment variable for feature flag + // Default to false (use old queries) for backward compatibility + usePastGames := false + if envVal := os.Getenv("USE_PAST_GAMES_TABLE"); envVal == "true" { + usePastGames = true + log.Info().Bool("use_past_games", usePastGames).Msg("past-games-table-feature-flag") + } + + return &DBStore{ + cfg: config, + dbPool: dbPool, + userStore: userStore, + queries: models.New(dbPool), + usePastGamesTable: usePastGames, + }, nil +} + +// SetUsePastGamesTable allows runtime configuration of the feature flag +func (s *DBStore) SetUsePastGamesTable(use bool) { + s.usePastGamesTable = use + log.Info().Bool("use_past_games", use).Msg("updated-past-games-table-feature-flag") } // SetGameEventChan sets the game event channel to the passed in channel. @@ -120,13 +178,53 @@ func (s *DBStore) GameEventChan() chan<- *entity.EventWrapper { // Only API nodes that have this game in its cache should respond to requests. // XXX: The above comment is obsolete and we will likely redo the way we do caches in the future. 
func (s *DBStore) Get(ctx context.Context, id string) (*entity.Game, error) { - - g, err := s.queries.GetGame(ctx, common.ToPGTypeText(id)) + // First get basic info to check migration status without unmarshaling nullable fields + basicInfo, err := s.queries.GetGameBasicInfo(ctx, common.ToPGTypeText(id)) if err != nil { - log.Err(err).Msg("error-get-game") + log.Err(err).Msg("error-get-game-basic-info") return nil, err } + + // Check if the game has ended + if basicInfo.GameEndReason.Valid && basicInfo.GameEndReason.Int32 != int32(pb.GameEndReason_NONE) { + // Game has ended, check migration status + if basicInfo.MigrationStatus.Valid && basicInfo.MigrationStatus.Int16 >= MigrationStatusMigrated { + // Game has been migrated to past_games, fetch from there + // We need to create a minimal Game struct to pass to getFromPastGames + g := models.Game{ + ID: basicInfo.ID, + Uuid: basicInfo.Uuid, + CreatedAt: basicInfo.CreatedAt, + UpdatedAt: basicInfo.UpdatedAt, + } + return s.getFromPastGames(ctx, g, true) + } else { + // Game ended but not yet migrated (legacy data) + // Try to get full data - this should work for non-migrated games + fullGame, err := s.queries.GetGameFullData(ctx, common.ToPGTypeText(id)) + if err != nil { + log.Err(err).Msg("error-get-game-full-data") + return nil, err + } + return s.inProgressGame(fullGame, true) + } + } else { + // Game is still in progress, get full data + fullGame, err := s.queries.GetGameFullData(ctx, common.ToPGTypeText(id)) + if err != nil { + log.Err(err).Msg("error-get-game-full-data-in-progress") + return nil, err + } + return s.inProgressGame(fullGame, true) + } +} + +func (s *DBStore) inProgressGame(g models.Game, playTurns bool) (*entity.Game, error) { // convert to an entity.Game + gr, err := ParseGameRequest(g.Request) + if err != nil { + return nil, err + } entGame := &entity.Game{ Started: g.Started.Bool, Timers: g.Timers, @@ -142,29 +240,33 @@ func (s *DBStore) Get(ctx context.Context, id string) (*entity.Game, 
error) { Type: pb.GameType(g.Type.Int32), DBID: uint(g.ID), TournamentData: &g.TournamentData, - GameReq: &g.Request, // move to g.GameRequest after JSON migration - + GameReq: &entity.GameRequest{GameRequest: gr}, } entGame.SetTimerModule(&entity.GameTimer{}) - - // Then unmarshal the history and start a game from it. - hist := &macondopb.GameHistory{} - err = proto.Unmarshal(g.History, hist) - if err != nil { - return nil, err + if playTurns { + // Then unmarshal the history and start a game from it. + hist := &macondopb.GameHistory{} + err := proto.Unmarshal(g.History, hist) + if err != nil { + return nil, err + } + log.Debug().Interface("hist", hist).Msg("hist-unmarshal") + return s.playHistory(entGame, hist) } - log.Debug().Interface("hist", hist).Msg("hist-unmarshal") + return entGame, nil +} +func (s *DBStore) playHistory(entGame *entity.Game, hist *macondopb.GameHistory) (*entity.Game, error) { lexicon := hist.Lexicon if lexicon == "" { // This can happen for some early games where we didn't migrate this. 
- lexicon = g.Request.Lexicon + lexicon = entGame.GameReq.Lexicon } rules, err := macondogame.NewBasicGameRules( - s.cfg.MacondoConfig(), lexicon, g.Request.Rules.BoardLayoutName, - g.Request.Rules.LetterDistributionName, macondogame.CrossScoreOnly, - macondogame.Variant(g.Request.Rules.VariantName)) + s.cfg.MacondoConfig(), lexicon, entGame.GameReq.Rules.BoardLayoutName, + entGame.GameReq.Rules.LetterDistributionName, macondogame.CrossScoreOnly, + macondogame.Variant(entGame.GameReq.Rules.VariantName)) if err != nil { return nil, err } @@ -208,67 +310,370 @@ func (s *DBStore) Get(ctx context.Context, id string) (*entity.Game, error) { return entGame, nil } +func (s *DBStore) getFromPastGames(ctx context.Context, g models.Game, playTurns bool) (*entity.Game, error) { + gid := g.Uuid.String + createdAt := g.CreatedAt.Time + + pastgame, err := s.queries.GetPastGame(ctx, models.GetPastGameParams{ + Gid: gid, + CreatedAt: pgtype.Timestamptz{Time: createdAt, Valid: true}, + }) + if err != nil { + log.Err(err).Msg("error-get-past-game") + return nil, err + } + + // Get player IDs from game_players table + players, err := s.queries.GetGamePlayers(ctx, gid) + if err != nil { + log.Err(err).Msg("error-get-game-players") + return nil, err + } + + // Map players by index + var playerDBIDs [2]uint + for _, p := range players { + if p.PlayerIndex >= 0 && p.PlayerIndex < 2 { + playerDBIDs[p.PlayerIndex] = uint(p.PlayerID) + } + } + // Get GameRequest and TournamentData from game_metadata + metadata, err := s.queries.GetGameMetadata(ctx, gid) + if err != nil { + log.Err(err).Msg("error-get-game-metadata") + return nil, err + } + + gr, err := ParseGameRequest(metadata.GameRequest) + if err != nil { + log.Err(err).Msg("error-unmarshalling-game-request") + return nil, err + } + + // Parse tournament data from metadata + var tournamentData *entity.TournamentData + if metadata.TournamentData != nil { + var td entity.TournamentData + if err := json.Unmarshal(metadata.TournamentData, 
&td); err == nil { + tournamentData = &td + } + } + if tournamentData == nil { + tournamentData = &entity.TournamentData{} + } + + // convert to an entity.Game + entGame := &entity.Game{ + CreatedAt: pastgame.CreatedAt.Time, + GameEndReason: pb.GameEndReason(pastgame.GameEndReason), + GameReq: &entity.GameRequest{GameRequest: gr}, + Stats: &pastgame.Stats, + Quickdata: &pastgame.Quickdata, + Type: pb.GameType(pastgame.Type), + TournamentData: tournamentData, + PlayerDBIDs: playerDBIDs, + ChangeHook: s.gameEventChan, + DBID: uint(g.ID), // Keep the original game ID + } + + entGame.SetTimerModule(&entity.GameTimer{}) + + winnerIdx := pastgame.WinnerIdx + if winnerIdx.Valid { + entGame.WinnerIdx = int(winnerIdx.Int16) + switch entGame.WinnerIdx { + case 0: + entGame.LoserIdx = 1 + case 1: + entGame.LoserIdx = 0 + case -1: + entGame.LoserIdx = -1 + default: + log.Err(fmt.Errorf("invalid winner index: %d", entGame.WinnerIdx)).Msg("invalid-winner-index") + return nil, fmt.Errorf("invalid winner index: %d", entGame.WinnerIdx) + } + } + + if playTurns { + docbts := pastgame.GameDocument + if docbts != nil { + doc := &pb.GameDocument{} + err = protojson.Unmarshal(docbts, doc) + if err != nil { + log.Err(err).Msg("error-unmarshalling-game-document") + return nil, err + } + gh, err := utilities.ToGameHistory(doc, s.cfg) + if err != nil { + log.Err(err).Msg("error-converting-game-document") + return nil, err + } + return s.playHistory(entGame, gh) + } + return nil, fmt.Errorf("game document is nil") + } + + return entGame, nil +} + // GetMetadata gets metadata about the game, but does not actually play the game. 
func (s *DBStore) GetMetadata(ctx context.Context, id string) (*pb.GameInfoResponse, error) { - g := &game{} + // First get basic info to check migration status without unmarshaling nullable fields + basicInfo, err := s.queries.GetGameBasicInfo(ctx, common.ToPGTypeText(id)) + if err != nil { + log.Err(err).Msg("error-get-game-basic-info-in-get-metadata") + return nil, err + } + + // Check if the game has ended and been migrated + if basicInfo.GameEndReason.Valid && basicInfo.GameEndReason.Int32 != int32(pb.GameEndReason_NONE) { + // Game has ended, check migration status + if basicInfo.MigrationStatus.Valid && basicInfo.MigrationStatus.Int16 >= MigrationStatusMigrated { + // Game has been migrated to past_games, fetch from there directly + pastMeta, err := s.queries.GetPastGameMetadata(ctx, models.GetPastGameMetadataParams{ + Gid: id, + CreatedAt: pgtype.Timestamptz{Time: basicInfo.CreatedAt.Time, Valid: true}, + }) + if err != nil { + log.Err(err).Msg("error-get-past-game-metadata-migrated") + return nil, err + } + + gr, err := ParseGameRequest(pastMeta.GameRequest) + if err != nil { + log.Err(err).Msg("error-unmarshalling-game-request") + return nil, err + } + + // Get time control name + timefmt := entity.TCRegular + if gr != nil { + tc, _, err := entity.VariantFromGameReq(gr) + if err == nil { + timefmt = tc + } + } + + winner := int32(-1) + if pastMeta.WinnerIdx.Valid { + winner = int32(pastMeta.WinnerIdx.Int16) + } - result := s.db.Where("uuid = ?", id).First(g) - if result.Error != nil { - return nil, result.Error + // Parse tournament info from JSONB + var tourneyID string + if pastMeta.TournamentData != nil { + var td entity.TournamentData + if err := json.Unmarshal(pastMeta.TournamentData, &td); err == nil && td.Id != "" { + tourneyID = td.Id + } + } + + return &pb.GameInfoResponse{ + Players: pastMeta.Quickdata.PlayerInfo, + GameEndReason: pb.GameEndReason(pastMeta.GameEndReason), + Scores: pastMeta.Quickdata.FinalScores, + Winner: winner, + 
TimeControlName: string(timefmt), + CreatedAt: timestamppb.New(basicInfo.CreatedAt.Time), + LastUpdate: timestamppb.New(basicInfo.UpdatedAt.Time), + GameId: id, + GameRequest: gr, + TournamentId: tourneyID, + Type: pb.GameType(pastMeta.Type), + }, nil + } } - return convertGameToInfoResponse(g) + // Game is either in progress or ended but not migrated yet + // Try to get metadata from games table + g, err := s.queries.GetLiveGameMetadata(ctx, common.ToPGTypeText(id)) + if err != nil { + // If this fails for an ended game, it might be a legacy migration without proper status + // (migrated to past_games but migration_status not set), so try past_games as fallback + if err == pgx.ErrNoRows || basicInfo.GameEndReason.Valid && basicInfo.GameEndReason.Int32 != int32(pb.GameEndReason_NONE) { + // Try to get from past_games as fallback for legacy migrations + pastMeta, err := s.queries.GetPastGameMetadata(ctx, models.GetPastGameMetadataParams{ + Gid: id, + CreatedAt: pgtype.Timestamptz{Time: basicInfo.CreatedAt.Time, Valid: true}, + }) + if err != nil { + log.Err(err).Msg("error-get-past-game-metadata-fallback") + return nil, err + } -} + gr, err := ParseGameRequest(pastMeta.GameRequest) + if err != nil { + log.Err(err).Msg("error-unmarshalling-game-request") + return nil, err + } -func (s *DBStore) GetRematchStreak(ctx context.Context, originalRequestId string) (*gs.StreakInfoResponse, error) { - games := []*game{} - ctxDB := s.db.WithContext(ctx) - result := ctxDB.Raw(`SELECT uuid, winner_idx, quickdata FROM games where quickdata->>'o' = ? - AND game_end_reason not in (?, ?, ?) 
ORDER BY created_at desc`, originalRequestId, - pb.GameEndReason_NONE, pb.GameEndReason_ABORTED, pb.GameEndReason_CANCELLED).Scan(&games) - if result.Error != nil { - return nil, result.Error + // Get time control name + timefmt := entity.TCRegular + if gr != nil { + tc, _, err := entity.VariantFromGameReq(gr) + if err == nil { + timefmt = tc + } + } + + winner := int32(-1) + if pastMeta.WinnerIdx.Valid { + winner = int32(pastMeta.WinnerIdx.Int16) + } + + // Parse tournament info from JSONB + var tourneyID string + if pastMeta.TournamentData != nil { + var td entity.TournamentData + if err := json.Unmarshal(pastMeta.TournamentData, &td); err == nil && td.Id != "" { + tourneyID = td.Id + } + } + + return &pb.GameInfoResponse{ + Players: pastMeta.Quickdata.PlayerInfo, + GameEndReason: pb.GameEndReason(pastMeta.GameEndReason), + Scores: pastMeta.Quickdata.FinalScores, + Winner: winner, + TimeControlName: string(timefmt), + CreatedAt: timestamppb.New(basicInfo.CreatedAt.Time), + LastUpdate: timestamppb.New(basicInfo.UpdatedAt.Time), + GameId: id, + GameRequest: gr, + TournamentId: tourneyID, + Type: pb.GameType(pastMeta.Type), + }, nil + } + log.Err(err).Msg("error-get-live-game-metadata") + return nil, err } - resp := &gs.StreakInfoResponse{ - Streak: make([]*gs.StreakInfoResponse_SingleGameInfo, len(games)), + // Successfully got metadata from games table + // Note that the game request is stored as proto in the current games + // table, but as protojson in past games table. We will likely migrate + // the current games table to use protojson as well in the future. 
+ gr, err := ParseGameRequest(g.Request) + if err != nil { + log.Err(err).Msg("error-unmarshalling-game-request") + return nil, err } - if result.RowsAffected <= 0 { - return resp, nil + // Get time control name + timefmt := entity.TCRegular + if gr != nil { + tc, _, err := entity.VariantFromGameReq(gr) + if err == nil { + timefmt = tc + } } - for idx, g := range games { - var mdata entity.Quickdata - err := json.Unmarshal(g.Quickdata, &mdata) + // Extract tournament info + var tourneyID string + if g.TournamentData.Id != "" { + tourneyID = g.TournamentData.Id + } + + return &pb.GameInfoResponse{ + Players: g.Quickdata.PlayerInfo, + GameEndReason: pb.GameEndReason(g.GameEndReason.Int32), + Scores: g.Quickdata.FinalScores, + Winner: int32(g.WinnerIdx.Int32), + TimeControlName: string(timefmt), + CreatedAt: timestamppb.New(g.CreatedAt.Time), + LastUpdate: timestamppb.New(g.UpdatedAt.Time), + GameId: g.Uuid.String, + GameRequest: gr, + TournamentId: tourneyID, + Type: pb.GameType(g.Type.Int32), + }, nil +} + +// func (s *DBStore) + +func (s *DBStore) GetRematchStreak(ctx context.Context, originalRequestId string) (*gs.StreakInfoResponse, error) { + resp := &gs.StreakInfoResponse{} + + if s.usePastGamesTable { + // New path: use game_players table + games, err := s.queries.GetRematchStreak(ctx, originalRequestId) if err != nil { - log.Debug().Err(err).Msg("convert-game-quickdata") - // If it's empty or unconvertible don't quit. We need this - // for backwards compatibility. 
- } - if idx == 0 { - playersInfo := make([]*gs.StreakInfoResponse_PlayerInfo, len(mdata.PlayerInfo)) - for i, p := range mdata.PlayerInfo { - playersInfo[i] = &gs.StreakInfoResponse_PlayerInfo{ - Nickname: p.Nickname, - Uuid: p.UserId, - } + return nil, err + } + + resp.Streak = make([]*gs.StreakInfoResponse_SingleGameInfo, len(games)) + + if len(games) == 0 { + return resp, nil + } + + // Get player info from the first game from past_games table + firstGameID := games[0].Gid + + // Get the created_at timestamp from games table to query past_games + gameRow, err := s.queries.GetGameBasicInfo(ctx, common.ToPGTypeText(firstGameID)) + if err != nil { + return nil, fmt.Errorf("failed to get game for streak: %w", err) + } + + pastGame, err := s.queries.GetPastGame(ctx, models.GetPastGameParams{ + Gid: firstGameID, + CreatedAt: pgtype.Timestamptz{Time: gameRow.CreatedAt.Time, Valid: true}, + }) + if err != nil { + return nil, fmt.Errorf("failed to get past game for streak: %w", err) + } + + // Extract player info from quickdata + if len(pastGame.Quickdata.PlayerInfo) >= 2 { + resp.PlayersInfo = []*gs.StreakInfoResponse_PlayerInfo{ + {Nickname: pastGame.Quickdata.PlayerInfo[0].Nickname, Uuid: pastGame.Quickdata.PlayerInfo[0].UserId}, + {Nickname: pastGame.Quickdata.PlayerInfo[1].Nickname, Uuid: pastGame.Quickdata.PlayerInfo[1].UserId}, } - sort.Slice(playersInfo, func(i, j int) bool { return playersInfo[i].Nickname > playersInfo[j].Nickname }) - resp.PlayersInfo = playersInfo } - winner := g.WinnerIdx - if len(resp.PlayersInfo) > 0 && len(mdata.PlayerInfo) > 0 && - resp.PlayersInfo[0].Nickname != mdata.PlayerInfo[0].Nickname { - if winner != -1 { - winner = 1 - winner + for idx, g := range games { + resp.Streak[idx] = &gs.StreakInfoResponse_SingleGameInfo{ + GameId: g.Gid, + Winner: g.WinnerIdx, } } - resp.Streak[idx] = &gs.StreakInfoResponse_SingleGameInfo{ - GameId: g.UUID, - Winner: int32(winner), + } else { + // Old path: use games table directly + games, err := 
s.queries.GetRematchStreakOld(ctx, originalRequestId) + if err != nil { + return nil, err + } + + resp.Streak = make([]*gs.StreakInfoResponse_SingleGameInfo, len(games)) + + if len(games) == 0 { + return resp, nil + } + + // Get player info from the first game + firstGameID := games[0].Gid.String + firstGame, err := s.queries.GetGameFullData(ctx, common.ToPGTypeText(firstGameID)) + if err != nil { + return nil, fmt.Errorf("failed to get first game for streak: %w", err) + } + + // Extract player info from quickdata + if len(firstGame.Quickdata.PlayerInfo) >= 2 { + resp.PlayersInfo = []*gs.StreakInfoResponse_PlayerInfo{ + {Nickname: firstGame.Quickdata.PlayerInfo[0].Nickname, Uuid: firstGame.Quickdata.PlayerInfo[0].UserId}, + {Nickname: firstGame.Quickdata.PlayerInfo[1].Nickname, Uuid: firstGame.Quickdata.PlayerInfo[1].UserId}, + } + } + + for idx, g := range games { + winner := int32(-1) + if g.WinnerIdx.Valid { + winner = g.WinnerIdx.Int32 + } + resp.Streak[idx] = &gs.StreakInfoResponse_SingleGameInfo{ + GameId: g.Gid.String, + Winner: winner, + } } } @@ -279,391 +684,561 @@ func (s *DBStore) GetRecentGames(ctx context.Context, username string, numGames if numGames > MaxRecentGames { return nil, errors.New("too many games") } - ctxDB := s.db.WithContext(ctx) - var games []*game - - if err := ctxDB.Transaction(func(tx *gorm.DB) error { - - var userId int64 - if results := tx.Raw( - "select id from users where lower(username) = lower(?)", - username). - Scan(&userId); results.Error != nil { - - return results.Error - } else if results.RowsAffected != 1 { - // Note: With gorm, Scan does not return an error when the row is not found. - // No users means no games. - // There should already be a unique key on (lower(username)), so there cannot be multiple matches. - return nil - } - - // Note: The query now sorts by id. It used to sort by created_at, which was not indexed. 
- // Note: A partial index may be helpful for the few players with the most number of completed games. - // Note: This query only selects ids, to reduce the amount of work required by the db to paginate. - var gameIds []int64 - if results := tx.Raw( - `select id from games where (player0_id = ? or player1_id = ?) - and game_end_reason not in (?, ?, ?) order by id desc limit ? offset ?`, - userId, userId, - pb.GameEndReason_NONE, pb.GameEndReason_ABORTED, pb.GameEndReason_CANCELLED, numGames, offset). - Find(&gameIds); results.Error != nil { - - return results.Error - } else if results.RowsAffected == 0 { - // No game ids means no games. - return nil - } - - // convertGamesToInfoResponses does not need History. - // This still reads each history, but then garbage-collects immediately. - // The "correct" way is to manually list all surviving column names. - if results := tx.Raw( - "select *, null history from games where id in ? order by id desc", - gameIds). - Find(&games); results.Error != nil { - - return results.Error - } - - return nil - }, &sql.TxOptions{ - Isolation: sql.LevelRepeatableRead, - ReadOnly: true, - }); err != nil { - // Note: REPEATABLE READ is correct for Postgres (other databases may require SERIALIZABLE to avoid phantom reads). - // The default READ COMMITTED may return invalid rows if an update invalidates the row after the id has been chosen. 
- log.Err(err).Str("username", username).Int("numGames", numGames).Int("offset", offset).Msg("get-recent-games") - return nil, err + + var responses []*pb.GameInfoResponse + + if s.usePastGamesTable { + // New path: use game_players and past_games tables + games, err := s.queries.GetRecentGamesByUsername(ctx, models.GetRecentGamesByUsernameParams{ + Username: username, + OffsetGames: int32(offset), + NumGames: int32(numGames), + }) + if err != nil { + return nil, err + } + + for _, g := range games { + // Parse the GameRequest from bytes + gameRequest, err := ParseGameRequest(g.GameRequest) + if err != nil { + log.Err(err).Msg("error-parsing-game-request") + continue // Skip this game if we can't parse its request + } + + // Get time control name + timefmt := entity.TCRegular + if gameRequest != nil { + tc, _, err := entity.VariantFromGameReq(gameRequest) + if err == nil { + timefmt = tc + } + } + + winner := int32(-1) + if g.WinnerIdx.Valid { + winner = int32(g.WinnerIdx.Int16) + } + + // Parse tournament info from JSONB + var tourneyID string + var tDiv string + var tRound int32 + var tGameIndex int32 + if g.TournamentData != nil { + var td entity.TournamentData + if err := json.Unmarshal(g.TournamentData, &td); err == nil && td.Id != "" { + tourneyID = td.Id + tDiv = td.Division + tRound = int32(td.Round) + tGameIndex = int32(td.GameIndex) + } + } + + info := &pb.GameInfoResponse{ + Players: g.Quickdata.PlayerInfo, + GameEndReason: pb.GameEndReason(g.GameEndReason), + Scores: g.Quickdata.FinalScores, + Winner: winner, + TimeControlName: string(timefmt), + CreatedAt: timestamppb.New(g.CreatedAt.Time), + LastUpdate: timestamppb.New(g.CreatedAt.Time), // Using created_at as proxy for last update + GameId: g.GameUuid, + GameRequest: gameRequest, + Type: pb.GameType(g.GameType), + TournamentId: tourneyID, + TournamentDivision: tDiv, + TournamentRound: tRound, + TournamentGameIndex: tGameIndex, + } + responses = append(responses, info) + } + } else { + // Old path: 
use games table directly + games, err := s.queries.GetRecentGamesByUsernameOld(ctx, models.GetRecentGamesByUsernameOldParams{ + Username: pgtype.Text{String: username, Valid: true}, + OffsetGames: int32(offset), + NumGames: int32(numGames), + }) + if err != nil { + return nil, err + } + + for _, g := range games { + // Parse the GameRequest from bytes + gameRequest, err := ParseGameRequest(g.GameRequest) + if err != nil { + log.Err(err).Msg("error-parsing-game-request") + continue // Skip this game if we can't parse its request + } + + // Get time control name + timefmt := entity.TCRegular + if gameRequest != nil { + tc, _, err := entity.VariantFromGameReq(gameRequest) + if err == nil { + timefmt = tc + } + } + + winner := int32(-1) + if g.WinnerIdx.Valid { + winner = g.WinnerIdx.Int32 + } + + gameEndReason := pb.GameEndReason_NONE + if g.GameEndReason.Valid { + gameEndReason = pb.GameEndReason(g.GameEndReason.Int32) + } + + gameType := pb.GameType_NATIVE + if g.GameType.Valid { + gameType = pb.GameType(g.GameType.Int32) + } + + info := &pb.GameInfoResponse{ + Players: g.Quickdata.PlayerInfo, + GameEndReason: gameEndReason, + Scores: g.Quickdata.FinalScores, + Winner: winner, + TimeControlName: string(timefmt), + CreatedAt: timestamppb.New(g.CreatedAt.Time), + LastUpdate: timestamppb.New(g.CreatedAt.Time), // Using created_at as proxy for last update + GameId: g.GameUuid.String, + GameRequest: gameRequest, + Type: gameType, + } + responses = append(responses, info) + } } - return convertGamesToInfoResponses(games) + return &pb.GameInfoResponses{GameInfo: responses}, nil } func (s *DBStore) GetRecentTourneyGames(ctx context.Context, tourneyID string, numGames int, offset int) (*pb.GameInfoResponses, error) { if numGames > MaxRecentGames { return nil, errors.New("too many games") } - ctxDB := s.db.WithContext(ctx) - var games []*game - - if err := ctxDB.Transaction(func(tx *gorm.DB) error { - - // Note: This query only selects ids, to reduce the amount of work 
required by the db to paginate. - var gameIds []int64 - if results := tx.Raw( - `select id from games where tournament_id = ? - and game_end_reason not in (?, ?, ?) order by updated_at desc limit ? offset ?`, - tourneyID, - pb.GameEndReason_NONE, pb.GameEndReason_ABORTED, pb.GameEndReason_CANCELLED, numGames, offset). - Find(&gameIds); results.Error != nil { - - return results.Error - } else if results.RowsAffected == 0 { - // No game ids means no games. - return nil - } - - // convertGamesToInfoResponses does not need History. - // This still reads each history, but then garbage-collects immediately. - // The "correct" way is to manually list all surviving column names. - if results := tx.Raw( - "select *, null history from games where id in ? order by updated_at desc", - gameIds). - Find(&games); results.Error != nil { - - return results.Error - } - - return nil - }, &sql.TxOptions{ - Isolation: sql.LevelRepeatableRead, - ReadOnly: true, - }); err != nil { - // Note: REPEATABLE READ is correct for Postgres (other databases may require SERIALIZABLE to avoid phantom reads). - // The default READ COMMITTED may return invalid rows if an update invalidates the row after the id has been chosen. 
- log.Err(err).Str("tourneyID", tourneyID).Int("numGames", numGames).Int("offset", offset).Msg("get-recent-tourney-games") - return nil, err - } - return convertGamesToInfoResponses(games) -} + var responses []*pb.GameInfoResponse -func convertGamesToInfoResponses(games []*game) (*pb.GameInfoResponses, error) { - responses := []*pb.GameInfoResponse{} - for _, g := range games { - info, err := convertGameToInfoResponse(g) + if s.usePastGamesTable { + // New path: use past_games table + games, err := s.queries.GetRecentTourneyGames(ctx, models.GetRecentTourneyGamesParams{ + TourneyID: tourneyID, + OffsetGames: int32(offset), + NumGames: int32(numGames), + }) if err != nil { return nil, err } - responses = append(responses, info) - } - return &pb.GameInfoResponses{GameInfo: responses}, nil -} -func convertGameToInfoResponse(g *game) (*pb.GameInfoResponse, error) { - var mdata entity.Quickdata + for _, g := range games { + // Parse the GameRequest from bytes + gameRequest, err := ParseGameRequest(g.GameRequest) + if err != nil { + log.Err(err).Msg("error-parsing-game-request") + continue // Skip this game if we can't parse its request + } - err := json.Unmarshal(g.Quickdata, &mdata) - if err != nil { - log.Debug().Err(err).Msg("convert-game-quickdata") - // If it's empty or unconvertible don't quit. We need this - // for backwards compatibility. 
- } + // Get time control name + timefmt := entity.TCRegular + if gameRequest != nil { + tc, _, err := entity.VariantFromGameReq(gameRequest) + if err == nil { + timefmt = tc + } + } - gamereq := &pb.GameRequest{} - err = proto.Unmarshal(g.Request, gamereq) - if err != nil { - return nil, err - } - timefmt, _, err := entity.VariantFromGameReq(gamereq) - if err != nil { - return nil, err - } + winner := int32(-1) + if g.WinnerIdx.Valid { + winner = int32(g.WinnerIdx.Int16) + } - var trdata entity.TournamentData - tDiv := "" - tRound := 0 - tGameIndex := 0 - tid := "" + // Parse tournament info from JSONB + var tDiv string + var tRound int32 + var tGameIndex int32 + if g.TournamentData != nil { + var td entity.TournamentData + if err := json.Unmarshal(g.TournamentData, &td); err == nil { + tDiv = td.Division + tRound = int32(td.Round) + tGameIndex = int32(td.GameIndex) + } + } - err = json.Unmarshal(g.TournamentData, &trdata) - if err == nil { - tDiv = trdata.Division - tRound = trdata.Round - tGameIndex = trdata.GameIndex - tid = trdata.Id - } - - info := &pb.GameInfoResponse{ - Players: mdata.PlayerInfo, - GameEndReason: pb.GameEndReason(g.GameEndReason), - Scores: mdata.FinalScores, - Winner: int32(g.WinnerIdx), - TimeControlName: string(timefmt), - CreatedAt: timestamppb.New(g.CreatedAt), - LastUpdate: timestamppb.New(g.UpdatedAt), - GameId: g.UUID, - TournamentId: tid, - GameRequest: gamereq, - TournamentDivision: tDiv, - TournamentRound: int32(tRound), - TournamentGameIndex: int32(tGameIndex), - Type: g.Type, - } - return info, nil + info := &pb.GameInfoResponse{ + Players: g.Quickdata.PlayerInfo, + GameEndReason: pb.GameEndReason(g.GameEndReason), + Scores: g.Quickdata.FinalScores, + Winner: winner, + TimeControlName: string(timefmt), + CreatedAt: timestamppb.New(g.CreatedAt.Time), + LastUpdate: timestamppb.New(g.CreatedAt.Time), + GameId: g.Gid, + TournamentId: tourneyID, + GameRequest: gameRequest, + TournamentDivision: tDiv, + TournamentRound: tRound, + 
TournamentGameIndex: tGameIndex, + Type: pb.GameType(g.Type), + } + responses = append(responses, info) + } + } else { + // Old path: use games table directly + games, err := s.queries.GetRecentTourneyGamesOld(ctx, models.GetRecentTourneyGamesOldParams{ + TourneyID: tourneyID, + OffsetGames: int32(offset), + NumGames: int32(numGames), + }) + if err != nil { + return nil, err + } + + for _, g := range games { + // Parse the GameRequest from bytes + gameRequest, err := ParseGameRequest(g.GameRequest) + if err != nil { + log.Err(err).Msg("error-parsing-game-request") + continue // Skip this game if we can't parse its request + } + + // Get time control name + timefmt := entity.TCRegular + if gameRequest != nil { + tc, _, err := entity.VariantFromGameReq(gameRequest) + if err == nil { + timefmt = tc + } + } + + winner := int32(-1) + if g.WinnerIdx.Valid { + winner = g.WinnerIdx.Int32 + } + + gameEndReason := pb.GameEndReason_NONE + if g.GameEndReason.Valid { + gameEndReason = pb.GameEndReason(g.GameEndReason.Int32) + } + + gameType := pb.GameType_NATIVE + if g.Type.Valid { + gameType = pb.GameType(g.Type.Int32) + } + + // Extract tournament info + var tDiv string + var tRound int32 + var tGameIndex int32 + if g.TournamentData.Id != "" { + tDiv = g.TournamentData.Division + tRound = int32(g.TournamentData.Round) + tGameIndex = int32(g.TournamentData.GameIndex) + } + + info := &pb.GameInfoResponse{ + Players: g.Quickdata.PlayerInfo, + GameEndReason: gameEndReason, + Scores: g.Quickdata.FinalScores, + Winner: winner, + TimeControlName: string(timefmt), + CreatedAt: timestamppb.New(g.CreatedAt.Time), + LastUpdate: timestamppb.New(g.CreatedAt.Time), + GameId: g.Gid.String, + TournamentId: tourneyID, + GameRequest: gameRequest, + TournamentDivision: tDiv, + TournamentRound: tRound, + TournamentGameIndex: tGameIndex, + Type: gameType, + } + responses = append(responses, info) + } + } + + return &pb.GameInfoResponses{GameInfo: responses}, nil } +// TODO: Remove these 
GORM-based functions once migrated to sqlc + // Set takes in a game entity that _already exists_ in the DB, and writes it to // the database. func (s *DBStore) Set(ctx context.Context, g *entity.Game) error { - // s.db.LogMode(true) - dbg, err := s.toDBObj(g) + hist, err := proto.Marshal(g.History()) if err != nil { return err } - th := &macondopb.GameHistory{} - err = proto.Unmarshal(dbg.History, th) + + // Marshal GameRequest as proto for live games table + requestBytes, err := MarshalGameRequestAsProto(g.GameReq.GameRequest) if err != nil { return err } - // result := s.db.Model(&game{}).Set("gorm:query_option", "FOR UPDATE"). - // Where("uuid = ?", g.GameID()).Update(dbg) - // s.db.LogMode(false) - - // XXX: not sure this select for update is working. Might consider - // moving to select for share?? - ctxDB := s.db.WithContext(ctx) - result := ctxDB.Model(&game{}).Clauses(clause.Locking{Strength: "UPDATE"}). - Where("uuid = ?", g.GameID()).Updates(dbg) + var tourneyID pgtype.Text + if g.TournamentData != nil && g.TournamentData.Id != "" { + tourneyID = pgtype.Text{String: g.TournamentData.Id, Valid: true} + } - return result.Error + return s.queries.UpdateGame(ctx, models.UpdateGameParams{ + UpdatedAt: pgtype.Timestamptz{Time: g.CreatedAt, Valid: true}, // BUG(review): pins updated_at to CreatedAt on every Set, so updated_at never advances — should likely be time.Now(); confirm + Player0ID: pgtype.Int4{Int32: int32(g.PlayerDBIDs[0]), Valid: true}, + Player1ID: pgtype.Int4{Int32: int32(g.PlayerDBIDs[1]), Valid: true}, + Timers: g.Timers, + Started: pgtype.Bool{Bool: g.Started, Valid: true}, + GameEndReason: pgtype.Int4{Int32: int32(g.GameEndReason), Valid: true}, + WinnerIdx: pgtype.Int4{Int32: int32(g.WinnerIdx), Valid: true}, + LoserIdx: pgtype.Int4{Int32: int32(g.LoserIdx), Valid: true}, + Request: requestBytes, + History: hist, + Stats: *g.Stats, + Quickdata: *g.Quickdata, + TournamentData: *g.TournamentData, // NOTE(review): unconditional deref, yet the tourneyID guard above tolerates nil — panics if TournamentData is nil; confirm invariant + TournamentID: tourneyID, + ReadyFlag: pgtype.Int8{Int64: 0, Valid: true}, // NOTE(review): every Set resets ready_flag to 0 — confirm this cannot clobber in-flight ready state + MetaEvents: *g.MetaEvents, + Uuid: 
common.ToPGTypeText(g.GameID()), + }) } func (s *DBStore) Exists(ctx context.Context, id string) (bool, error) { - - var count int64 - result := s.db.Model(&game{}).Where("uuid = ?", id).Count(&count) - if result.Error != nil { - return false, result.Error + // Check if game exists in games table. Note that we only need to check this + // table because we don't migrate the ID to the partitioned past games table. + exists, err := s.queries.GameExists(ctx, common.ToPGTypeText(id)) + if err != nil { + log.Err(err).Msg("error-checking-game-exists") + return false, err } - if count > 1 { - return true, errors.New("unexpected duplicate ids") + if !exists { + log.Debug().Str("game_id", id).Msg("game-not-found-in-live-games") + return false, nil } - return count == 1, nil + return true, nil } // Create saves a brand new entity to the database func (s *DBStore) Create(ctx context.Context, g *entity.Game) error { - dbg, err := s.toDBObj(g) + hist, err := proto.Marshal(g.History()) + if err != nil { + return err + } + + // Marshal GameRequest as proto for live games table + requestBytes, err := MarshalGameRequestAsProto(g.GameReq.GameRequest) if err != nil { return err } - log.Debug().Interface("dbg", dbg).Msg("dbg") - ctxDB := s.db.WithContext(ctx) - result := ctxDB.Create(dbg) - return result.Error + + var tourneyID pgtype.Text + if g.TournamentData != nil && g.TournamentData.Id != "" { + tourneyID = pgtype.Text{String: g.TournamentData.Id, Valid: true} + } + + return s.queries.CreateGame(ctx, models.CreateGameParams{ + CreatedAt: pgtype.Timestamptz{Time: g.CreatedAt, Valid: true}, + UpdatedAt: pgtype.Timestamptz{Time: g.CreatedAt, Valid: true}, + Uuid: common.ToPGTypeText(g.GameID()), + Player0ID: pgtype.Int4{Int32: int32(g.PlayerDBIDs[0]), Valid: true}, + Player1ID: pgtype.Int4{Int32: int32(g.PlayerDBIDs[1]), Valid: true}, + Timers: g.Timers, + Started: pgtype.Bool{Bool: g.Started, Valid: true}, + GameEndReason: pgtype.Int4{Int32: int32(g.GameEndReason), Valid: true}, + 
WinnerIdx: pgtype.Int4{Int32: int32(g.WinnerIdx), Valid: true}, + LoserIdx: pgtype.Int4{Int32: int32(g.LoserIdx), Valid: true}, + Request: requestBytes, + History: hist, + Stats: *g.Stats, + Quickdata: *g.Quickdata, + TournamentData: *g.TournamentData, + TournamentID: tourneyID, + ReadyFlag: pgtype.Int8{Int64: 0, Valid: true}, // Default to 0 + MetaEvents: *g.MetaEvents, + Type: pgtype.Int4{Int32: int32(g.Type), Valid: true}, + }) } func (s *DBStore) CreateRaw(ctx context.Context, g *entity.Game, gt pb.GameType) error { if gt == pb.GameType_NATIVE { return fmt.Errorf("this game already exists: %s", g.Uid()) } - ctxDB := s.db.WithContext(ctx) - req, err := proto.Marshal(g.GameReq) + hist, err := proto.Marshal(g.History()) if err != nil { return err } - hist, err := proto.Marshal(g.History()) + + // Marshal GameRequest as proto for live games table + requestBytes, err := MarshalGameRequestAsProto(g.GameReq.GameRequest) if err != nil { return err } - result := ctxDB.Exec( - `insert into games(uuid, request, history, quickdata, timers, - game_end_reason, type) - values(?, ?, ?, ?, ?, ?, ?)`, - g.Uid(), req, hist, g.Quickdata, g.Timers, g.GameEndReason, gt) - return result.Error + + return s.queries.CreateRawGame(ctx, models.CreateRawGameParams{ + Uuid: common.ToPGTypeText(g.Uid()), + Request: requestBytes, + History: hist, + Quickdata: *g.Quickdata, + Timers: g.Timers, + GameEndReason: pgtype.Int4{Int32: int32(g.GameEndReason), Valid: true}, + Type: pgtype.Int4{Int32: int32(gt), Valid: true}, + }) } func (s *DBStore) ListActive(ctx context.Context, tourneyID string) (*pb.GameInfoResponses, error) { - var games []*game - - ctxDB := s.db.WithContext(ctx) - query := ctxDB.Table("games").Select("quickdata, request, uuid, started, tournament_data"). 
- Where("games.game_end_reason = ?", 0 /* ongoing games only*/) + var responses []*pb.GameInfoResponse if tourneyID != "" { - query = query.Where("games.tournament_id = ?", tourneyID) - } - - result := query.Order("games.id").Scan(&games) - - if result.Error != nil { - return nil, result.Error + games, err := s.queries.ListActiveTournamentGames(ctx, common.ToPGTypeText(tourneyID)) + if err != nil { + return nil, err + } + for _, g := range games { + info := &pb.GameInfoResponse{ + Players: g.Quickdata.PlayerInfo, + GameId: g.Uuid.String, + Type: pb.GameType_NATIVE, // Default type for active games + } + responses = append(responses, info) + } + } else { + games, err := s.queries.ListActiveGames(ctx) + if err != nil { + return nil, err + } + for _, g := range games { + info := &pb.GameInfoResponse{ + Players: g.Quickdata.PlayerInfo, + GameId: g.Uuid.String, + Type: pb.GameType_NATIVE, // Default type for active games + } + responses = append(responses, info) + } } - return convertGamesToInfoResponses(games) -} - -func (s *DBStore) Count(ctx context.Context) (int64, error) { - var count int64 - result := s.db.Model(&game{}).Count(&count) - if result.Error != nil { - return 0, result.Error - } - return count, nil + return &pb.GameInfoResponses{GameInfo: responses}, nil } // List all game IDs, ordered by date played. Should not be used by anything // other than debug or migration code when the db is still small. 
func (s *DBStore) ListAllIDs(ctx context.Context) ([]string, error) { - var gids []struct{ Uuid string } - result := s.db.Table("games").Select("uuid").Order("created_at").Scan(&gids) - ids := make([]string, len(gids)) - for idx, gid := range gids { - ids[idx] = gid.Uuid + ids, err := s.queries.ListAllIDs(ctx) + if err != nil { + log.Err(err).Msg("error-listing-all-ids") + return nil, err } - - return ids, result.Error + gameIDs := make([]string, len(ids)) + for i, id := range ids { + gameIDs[i] = id.String + } + return gameIDs, nil } func (s *DBStore) SetReady(ctx context.Context, gid string, pidx int) (int, error) { - var rf struct { - ReadyFlag int + readyRes, err := s.queries.SetReady(ctx, models.SetReadyParams{ + PlayerIdx: int32(pidx), + Uuid: common.ToPGTypeText(gid), + }) + + if err != nil { + // Only error now is if game doesn't exist + log.Err(err).Int("playerIdx", pidx).Str("gid", gid).Msg("setting-ready") + return 0, err } - ctxDB := s.db.WithContext(ctx) - // If the game is already ready and this gets called again, this function - // returns 0 rows, which means rf.ReadyFlag == 0 and the game won't start again. - result := ctxDB.Raw(`update games set ready_flag = ready_flag | (1 << ?) where uuid = ? - and ready_flag & (1 << ?) 
= 0 returning ready_flag`, pidx, gid, pidx).Scan(&rf) + log.Debug().Int("playerIdx", pidx).Str("gid", gid).Int("readyFlag", int(readyRes.Int64)).Msg("player-set-ready") + return int(readyRes.Int64), nil +} + +// TODO: Remove this GORM-based function - return rf.ReadyFlag, result.Error +func (s *DBStore) Disconnect() { + log.Warn().Msg("game-store-disconnect-not-implemented") } -func (s *DBStore) toDBObj(g *entity.Game) (*game, error) { - timers, err := json.Marshal(g.Timers) - if err != nil { - return nil, err - } - stats, err := json.Marshal(g.Stats) - if err != nil { - return nil, err - } - quickdata, err := json.Marshal(g.Quickdata) - if err != nil { - return nil, err - } - mdata, err := json.Marshal(g.MetaEvents) - if err != nil { - return nil, err - } - req, err := proto.Marshal(g.GameReq) - if err != nil { - return nil, err - } - hist, err := proto.Marshal(g.History()) - if err != nil { - return nil, err - } +func (s *DBStore) CachedCount(ctx context.Context) int { + return 0 +} - tourneydata, err := json.Marshal(g.TournamentData) +func (s *DBStore) GetHistory(ctx context.Context, id string) (*macondopb.GameHistory, error) { + // First check if the game has been migrated + basicInfo, err := s.queries.GetGameBasicInfo(ctx, common.ToPGTypeText(id)) if err != nil { + log.Err(err).Msg("error-get-game-basic-info-in-get-history") return nil, err } - dbg := &game{ - UUID: g.GameID(), - Player0ID: g.PlayerDBIDs[0], - Player1ID: g.PlayerDBIDs[1], - Timers: timers, - Stats: stats, - Quickdata: quickdata, - Started: g.Started, - GameEndReason: int(g.GameEndReason), - WinnerIdx: g.WinnerIdx, - LoserIdx: g.LoserIdx, - Request: req, - History: hist, - TournamentData: tourneydata, - MetaEvents: mdata, - Type: g.Type, - } - if g.TournamentData != nil { - dbg.TournamentID = g.TournamentData.Id - } + // Check if the game has been migrated to past_games + if basicInfo.MigrationStatus.Valid && basicInfo.MigrationStatus.Int16 >= MigrationStatusMigrated { + // Game has been 
migrated, get from past_games + pastGame, err := s.queries.GetPastGame(ctx, models.GetPastGameParams{ + Gid: id, + CreatedAt: pgtype.Timestamptz{Time: basicInfo.CreatedAt.Time, Valid: true}, + }) + if err != nil { + log.Err(err).Msg("error-get-past-game-in-get-history") + return nil, err + } - return dbg, nil -} + // Parse the game document + doc := &pb.GameDocument{} + err = protojson.Unmarshal(pastGame.GameDocument, doc) + if err != nil { + log.Err(err).Msg("error-unmarshalling-game-document-in-get-history") + return nil, err + } -func (s *DBStore) Disconnect() { - dbSQL, err := s.db.DB() - if err == nil { - log.Info().Msg("disconnecting SQL db") - dbSQL.Close() - return + // Convert to game history + gh, err := utilities.ToGameHistory(doc, s.cfg) + if err != nil { + log.Err(err).Msg("error-converting-game-document-to-history") + return nil, err + } + log.Debug().Interface("hist", gh).Msg("got-history-from-past-games") + return gh, nil } - log.Err(err).Msg("unable to disconnect") -} -func (s *DBStore) CachedCount(ctx context.Context) int { - return 0 -} + // Game not migrated, try to get from games table + bts, err := s.queries.GetHistory(ctx, common.ToPGTypeText(id)) + if err != nil { + // If this fails, it might be a legacy migration without proper status + // Try to get from past_games as fallback + if basicInfo.GameEndReason.Valid && basicInfo.GameEndReason.Int32 != int32(pb.GameEndReason_NONE) { + pastGame, err := s.queries.GetPastGame(ctx, models.GetPastGameParams{ + Gid: id, + CreatedAt: pgtype.Timestamptz{Time: basicInfo.CreatedAt.Time, Valid: true}, + }) + if err != nil { + log.Err(err).Msg("error-get-past-game-fallback-in-get-history") + return nil, err + } -func (s *DBStore) GetHistory(ctx context.Context, id string) (*macondopb.GameHistory, error) { - g := &game{} + // Parse the game document + doc := &pb.GameDocument{} + err = protojson.Unmarshal(pastGame.GameDocument, doc) + if err != nil { + 
log.Err(err).Msg("error-unmarshalling-game-document-fallback") + return nil, err + } - ctxDB := s.db.WithContext(ctx) - if result := ctxDB.Select("history").Where("uuid = ?", id).First(g); result.Error != nil { - return nil, result.Error + // Convert to game history + gh, err := utilities.ToGameHistory(doc, s.cfg) + if err != nil { + log.Err(err).Msg("error-converting-game-document-to-history-fallback") + return nil, err + } + log.Debug().Interface("hist", gh).Msg("got-history-from-past-games-fallback") + return gh, nil + } + log.Err(err).Msg("error-get-history") + return nil, err } hist := &macondopb.GameHistory{} - err := proto.Unmarshal(g.History, hist) + err = proto.Unmarshal(bts, hist) if err != nil { return nil, err } diff --git a/pkg/stores/game/migration.go b/pkg/stores/game/migration.go new file mode 100644 index 000000000..02be50697 --- /dev/null +++ b/pkg/stores/game/migration.go @@ -0,0 +1,156 @@ +package game + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/jackc/pgx/v5/pgtype" + "github.com/rs/zerolog/log" + "google.golang.org/protobuf/encoding/protojson" + + "github.com/woogles-io/liwords/pkg/entity" + "github.com/woogles-io/liwords/pkg/entity/utilities" + "github.com/woogles-io/liwords/pkg/stores/common" + "github.com/woogles-io/liwords/pkg/stores/models" +) + +// MigrateGameToPastGames migrates a completed game to the past_games and game_players tables +func (s *DBStore) MigrateGameToPastGames(ctx context.Context, g *entity.Game, ratingsBefore, ratingsAfter map[string]int32) error { + // Convert game to GameDocument format + doc, err := utilities.ToGameDocument(g, s.cfg) + if err != nil { + return fmt.Errorf("converting to game document: %w", err) + } + + docJSON, err := protojson.Marshal(doc) + if err != nil { + return fmt.Errorf("marshaling game document: %w", err) + } + + // Start transaction for all migration operations + tx, err := s.dbPool.Begin(ctx) + if err != nil { + return fmt.Errorf("starting transaction: %w", err) + } + 
defer tx.Rollback(ctx) + + // Create queries with transaction + txQueries := s.queries.WithTx(tx) + + // Marshal GameRequest as protojson for game_metadata table + gameRequestJSON, err := MarshalGameRequestAsJSON(g.GameReq.GameRequest) + if err != nil { + return fmt.Errorf("marshaling game request: %w", err) + } + + // Marshal TournamentData as JSON for game_metadata + var tournamentDataJSON []byte + if g.TournamentData != nil { + tournamentDataJSON, err = json.Marshal(g.TournamentData) + if err != nil { + return fmt.Errorf("marshaling tournament data: %w", err) + } + } + + // Insert into game_metadata first + err = txQueries.InsertGameMetadata(ctx, models.InsertGameMetadataParams{ + GameUuid: g.GameID(), + CreatedAt: pgtype.Timestamptz{Time: g.CreatedAt, Valid: true}, + GameRequest: gameRequestJSON, + TournamentData: tournamentDataJSON, + }) + if err != nil { + return fmt.Errorf("inserting into game_metadata: %w", err) + } + + // Insert into past_games (without game_request and tournament_data) + err = txQueries.InsertPastGame(ctx, models.InsertPastGameParams{ + Gid: g.GameID(), + CreatedAt: pgtype.Timestamptz{Time: g.CreatedAt, Valid: true}, + GameEndReason: int16(g.GameEndReason), + WinnerIdx: pgtype.Int2{Int16: int16(g.WinnerIdx), Valid: g.WinnerIdx >= -1}, + GameDocument: docJSON, + Stats: *g.Stats, + Quickdata: *g.Quickdata, + Type: int16(g.Type), + }) + if err != nil { + return fmt.Errorf("inserting into past_games: %w", err) + } + + // Insert game_players records (using transaction) + err = s.insertGamePlayersWithTx(ctx, txQueries, g, ratingsBefore, ratingsAfter) + if err != nil { + return fmt.Errorf("inserting game players: %w", err) + } + + // Update migration status + err = txQueries.UpdateGameMigrationStatus(ctx, models.UpdateGameMigrationStatusParams{ + MigrationStatus: pgtype.Int2{Int16: MigrationStatusMigrated, Valid: true}, + Uuid: common.ToPGTypeText(g.GameID()), + }) + if err != nil { + return fmt.Errorf("updating migration status: %w", err) + } + 
+ // Note: In staged migration approach, we don't clear data immediately. + // Data remains in games table until separate cleanup phase. + + // Commit the transaction + if err = tx.Commit(ctx); err != nil { + return fmt.Errorf("committing transaction: %w", err) + } + + log.Info().Str("gameID", g.GameID()).Msg("game migrated to past_games (data preserved in games table)") + return nil +} + +// insertGamePlayersWithTx inserts game_players records within a transaction +func (s *DBStore) insertGamePlayersWithTx(ctx context.Context, queries *models.Queries, g *entity.Game, ratingsBefore, ratingsAfter map[string]int32) error { + for pidx := 0; pidx < 2; pidx++ { + opponentIdx := 1 - pidx + playerNick := g.History().Players[pidx].Nickname + + params := models.InsertGamePlayerParams{ + GameUuid: g.GameID(), + PlayerID: int32(g.PlayerDBIDs[pidx]), + PlayerIndex: int16(pidx), + Score: int32(g.PointsFor(pidx)), + GameEndReason: int16(g.GameEndReason), + CreatedAt: pgtype.Timestamptz{Time: g.CreatedAt, Valid: true}, + GameType: int16(g.Type), + OpponentID: int32(g.PlayerDBIDs[opponentIdx]), + OpponentScore: int32(g.PointsFor(opponentIdx)), + OriginalRequestID: pgtype.Text{String: g.Quickdata.OriginalRequestId, Valid: g.Quickdata.OriginalRequestId != ""}, + } + + // Set win/loss/tie + if g.WinnerIdx == pidx { + params.Won = pgtype.Bool{Bool: true, Valid: true} + } else if g.WinnerIdx == opponentIdx { + params.Won = pgtype.Bool{Bool: false, Valid: true} + } + // Leave as NULL for ties (WinnerIdx == -1) + + // Set rating data if available + if ratingsBefore != nil && ratingsAfter != nil { + if before, ok := ratingsBefore[playerNick]; ok { + params.RatingBefore = pgtype.Int4{Int32: before, Valid: true} + } + if after, ok := ratingsAfter[playerNick]; ok { + params.RatingAfter = pgtype.Int4{Int32: after, Valid: true} + if params.RatingBefore.Valid { + delta := after - params.RatingBefore.Int32 + params.RatingDelta = pgtype.Int4{Int32: delta, Valid: true} + } + } + } + + err := 
queries.InsertGamePlayer(ctx, params) + if err != nil { + return fmt.Errorf("inserting player %d: %w", pidx, err) + } + } + return nil +} diff --git a/pkg/stores/models/games.sql.go b/pkg/stores/models/games.sql.go index 50ab6cd3c..eaf9d16c2 100644 --- a/pkg/stores/models/games.sql.go +++ b/pkg/stores/models/games.sql.go @@ -9,14 +9,169 @@ import ( "context" "github.com/jackc/pgx/v5/pgtype" + "github.com/woogles-io/liwords/pkg/entity" ) -const getGame = `-- name: GetGame :one -SELECT id, created_at, updated_at, deleted_at, uuid, player0_id, player1_id, timers, started, game_end_reason, winner_idx, loser_idx, request, history, stats, quickdata, tournament_data, tournament_id, ready_flag, meta_events, type, game_request, history_in_s3 FROM games WHERE uuid = $1 +const clearGameDataAfterMigration = `-- name: ClearGameDataAfterMigration :exec +UPDATE games +SET history = NULL, + stats = NULL, + quickdata = NULL, + timers = NULL, + meta_events = NULL, + request = NULL, + tournament_data = NULL, + player0_id = NULL, + player1_id = NULL, + updated_at = NOW() +WHERE uuid = $1 ` -func (q *Queries) GetGame(ctx context.Context, uuid pgtype.Text) (Game, error) { - row := q.db.QueryRow(ctx, getGame, uuid) +func (q *Queries) ClearGameDataAfterMigration(ctx context.Context, uuid pgtype.Text) error { + _, err := q.db.Exec(ctx, clearGameDataAfterMigration, uuid) + return err +} + +const createGame = `-- name: CreateGame :exec +INSERT INTO games ( + created_at, updated_at, uuid, player0_id, player1_id, timers, + started, game_end_reason, winner_idx, loser_idx, request, + history, stats, quickdata, tournament_data, tournament_id, + ready_flag, meta_events, type) +VALUES ( + $1, $2, $3, $4, $5, $6, + $7, $8, $9, $10, $11, + $12, $13, $14, $15, $16, + $17, $18, $19) +RETURNING id +` + +type CreateGameParams struct { + CreatedAt pgtype.Timestamptz + UpdatedAt pgtype.Timestamptz + Uuid pgtype.Text + Player0ID pgtype.Int4 + Player1ID pgtype.Int4 + Timers entity.Timers + Started 
pgtype.Bool + GameEndReason pgtype.Int4 + WinnerIdx pgtype.Int4 + LoserIdx pgtype.Int4 + Request []byte + History []byte + Stats entity.Stats + Quickdata entity.Quickdata + TournamentData entity.TournamentData + TournamentID pgtype.Text + ReadyFlag pgtype.Int8 + MetaEvents entity.MetaEventData + Type pgtype.Int4 +} + +func (q *Queries) CreateGame(ctx context.Context, arg CreateGameParams) error { + _, err := q.db.Exec(ctx, createGame, + arg.CreatedAt, + arg.UpdatedAt, + arg.Uuid, + arg.Player0ID, + arg.Player1ID, + arg.Timers, + arg.Started, + arg.GameEndReason, + arg.WinnerIdx, + arg.LoserIdx, + arg.Request, + arg.History, + arg.Stats, + arg.Quickdata, + arg.TournamentData, + arg.TournamentID, + arg.ReadyFlag, + arg.MetaEvents, + arg.Type, + ) + return err +} + +const createRawGame = `-- name: CreateRawGame :exec +INSERT INTO games(uuid, request, history, quickdata, timers, + game_end_reason, type) +VALUES($1, $2, $3, $4, $5, + $6, $7) +` + +type CreateRawGameParams struct { + Uuid pgtype.Text + Request []byte + History []byte + Quickdata entity.Quickdata + Timers entity.Timers + GameEndReason pgtype.Int4 + Type pgtype.Int4 +} + +func (q *Queries) CreateRawGame(ctx context.Context, arg CreateRawGameParams) error { + _, err := q.db.Exec(ctx, createRawGame, + arg.Uuid, + arg.Request, + arg.History, + arg.Quickdata, + arg.Timers, + arg.GameEndReason, + arg.Type, + ) + return err +} + +const gameExists = `-- name: GameExists :one +SELECT EXISTS ( + SELECT 1 FROM games WHERE uuid = $1 +) AS exists +` + +func (q *Queries) GameExists(ctx context.Context, uuid pgtype.Text) (bool, error) { + row := q.db.QueryRow(ctx, gameExists, uuid) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const getGameBasicInfo = `-- name: GetGameBasicInfo :one +SELECT id, uuid, game_end_reason, migration_status, created_at, updated_at, type +FROM games WHERE uuid = $1 +` + +type GetGameBasicInfoRow struct { + ID int32 + Uuid pgtype.Text + GameEndReason pgtype.Int4 + 
MigrationStatus pgtype.Int2 + CreatedAt pgtype.Timestamptz + UpdatedAt pgtype.Timestamptz + Type pgtype.Int4 +} + +func (q *Queries) GetGameBasicInfo(ctx context.Context, uuid pgtype.Text) (GetGameBasicInfoRow, error) { + row := q.db.QueryRow(ctx, getGameBasicInfo, uuid) + var i GetGameBasicInfoRow + err := row.Scan( + &i.ID, + &i.Uuid, + &i.GameEndReason, + &i.MigrationStatus, + &i.CreatedAt, + &i.UpdatedAt, + &i.Type, + ) + return i, err +} + +const getGameFullData = `-- name: GetGameFullData :one +SELECT id, created_at, updated_at, deleted_at, uuid, player0_id, player1_id, timers, started, game_end_reason, winner_idx, loser_idx, request, history, stats, quickdata, tournament_data, tournament_id, ready_flag, meta_events, type, game_request, history_in_s3, migration_status FROM games WHERE uuid = $1 +` + +func (q *Queries) GetGameFullData(ctx context.Context, uuid pgtype.Text) (Game, error) { + row := q.db.QueryRow(ctx, getGameFullData, uuid) var i Game err := row.Scan( &i.ID, @@ -42,15 +197,40 @@ func (q *Queries) GetGame(ctx context.Context, uuid pgtype.Text) (Game, error) { &i.Type, &i.GameRequest, &i.HistoryInS3, + &i.MigrationStatus, ) return i, err } -const getGameOwner = `-- name: GetGameOwner :one +const getGameMetadata = `-- name: GetGameMetadata :one +SELECT game_uuid, created_at, game_request, tournament_data +FROM game_metadata +WHERE game_uuid = $1 +` + +type GetGameMetadataRow struct { + GameUuid string + CreatedAt pgtype.Timestamptz + GameRequest []byte + TournamentData []byte +} -SELECT +func (q *Queries) GetGameMetadata(ctx context.Context, gameUuid string) (GetGameMetadataRow, error) { + row := q.db.QueryRow(ctx, getGameMetadata, gameUuid) + var i GetGameMetadataRow + err := row.Scan( + &i.GameUuid, + &i.CreatedAt, + &i.GameRequest, + &i.TournamentData, + ) + return i, err +} + +const getGameOwner = `-- name: GetGameOwner :one +SELECT agm.creator_uuid, - u.username + u.username FROM annotated_game_metadata agm JOIN users u ON agm.creator_uuid = 
u.uuid WHERE agm.game_uuid = $1 @@ -61,10 +241,810 @@ type GetGameOwnerRow struct { Username pgtype.Text } -// this is not even a uuid, sigh. func (q *Queries) GetGameOwner(ctx context.Context, gameUuid string) (GetGameOwnerRow, error) { row := q.db.QueryRow(ctx, getGameOwner, gameUuid) var i GetGameOwnerRow err := row.Scan(&i.CreatorUuid, &i.Username) return i, err } + +const getGamePlayers = `-- name: GetGamePlayers :many +SELECT player_id, player_index +FROM game_players +WHERE game_uuid = $1 +ORDER BY player_index +` + +type GetGamePlayersRow struct { + PlayerID int32 + PlayerIndex int16 +} + +func (q *Queries) GetGamePlayers(ctx context.Context, gameUuid string) ([]GetGamePlayersRow, error) { + rows, err := q.db.Query(ctx, getGamePlayers, gameUuid) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetGamePlayersRow + for rows.Next() { + var i GetGamePlayersRow + if err := rows.Scan(&i.PlayerID, &i.PlayerIndex); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getHistory = `-- name: GetHistory :one +SELECT history FROM games +WHERE uuid = $1 +` + +func (q *Queries) GetHistory(ctx context.Context, uuid pgtype.Text) ([]byte, error) { + row := q.db.QueryRow(ctx, getHistory, uuid) + var history []byte + err := row.Scan(&history) + return history, err +} + +const getLiveGameMetadata = `-- name: GetLiveGameMetadata :one +SELECT uuid, quickdata, game_end_reason, winner_idx, request, created_at, updated_at, + tournament_data, tournament_id, type +FROM games +WHERE uuid = $1 +` + +type GetLiveGameMetadataRow struct { + Uuid pgtype.Text + Quickdata entity.Quickdata + GameEndReason pgtype.Int4 + WinnerIdx pgtype.Int4 + Request []byte + CreatedAt pgtype.Timestamptz + UpdatedAt pgtype.Timestamptz + TournamentData entity.TournamentData + TournamentID pgtype.Text + Type pgtype.Int4 +} + +func (q *Queries) GetLiveGameMetadata(ctx context.Context, 
uuid pgtype.Text) (GetLiveGameMetadataRow, error) { + row := q.db.QueryRow(ctx, getLiveGameMetadata, uuid) + var i GetLiveGameMetadataRow + err := row.Scan( + &i.Uuid, + &i.Quickdata, + &i.GameEndReason, + &i.WinnerIdx, + &i.Request, + &i.CreatedAt, + &i.UpdatedAt, + &i.TournamentData, + &i.TournamentID, + &i.Type, + ) + return i, err +} + +const getPastGame = `-- name: GetPastGame :one +SELECT gid, created_at, game_end_reason, winner_idx, game_document, stats, quickdata, type FROM past_games WHERE gid = $1 AND created_at = $2 +` + +type GetPastGameParams struct { + Gid string + CreatedAt pgtype.Timestamptz +} + +func (q *Queries) GetPastGame(ctx context.Context, arg GetPastGameParams) (PastGame, error) { + row := q.db.QueryRow(ctx, getPastGame, arg.Gid, arg.CreatedAt) + var i PastGame + err := row.Scan( + &i.Gid, + &i.CreatedAt, + &i.GameEndReason, + &i.WinnerIdx, + &i.GameDocument, + &i.Stats, + &i.Quickdata, + &i.Type, + ) + return i, err +} + +const getPastGameMetadata = `-- name: GetPastGameMetadata :one +SELECT pg.game_end_reason, pg.winner_idx, gm.game_request, pg.quickdata, pg.type, gm.tournament_data +FROM past_games pg +JOIN game_metadata gm ON gm.game_uuid = pg.gid +WHERE pg.gid = $1 AND pg.created_at = $2 +` + +type GetPastGameMetadataParams struct { + Gid string + CreatedAt pgtype.Timestamptz +} + +type GetPastGameMetadataRow struct { + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + Quickdata entity.Quickdata + Type int16 + TournamentData []byte +} + +func (q *Queries) GetPastGameMetadata(ctx context.Context, arg GetPastGameMetadataParams) (GetPastGameMetadataRow, error) { + row := q.db.QueryRow(ctx, getPastGameMetadata, arg.Gid, arg.CreatedAt) + var i GetPastGameMetadataRow + err := row.Scan( + &i.GameEndReason, + &i.WinnerIdx, + &i.GameRequest, + &i.Quickdata, + &i.Type, + &i.TournamentData, + ) + return i, err +} + +const getRecentGamesByUsername = `-- name: GetRecentGamesByUsername :many +SELECT gp.game_uuid, gp.score, 
gp.opponent_score, gp.won, gp.game_end_reason, + gp.created_at, gp.game_type, u.username as opponent_username, + COALESCE(pg.quickdata, '{}') as quickdata, + gm.game_request, + gm.tournament_data, + COALESCE(pg.winner_idx, CASE WHEN gp.won = true THEN gp.player_index + WHEN gp.won = false THEN (1 - gp.player_index) + ELSE -1 END) as winner_idx +FROM game_players gp +JOIN users u ON u.id = gp.opponent_id +JOIN users player ON player.id = gp.player_id +JOIN game_metadata gm ON gm.game_uuid = gp.game_uuid +LEFT JOIN past_games pg ON pg.gid = gp.game_uuid +WHERE LOWER(player.username) = LOWER($1) +ORDER BY gp.created_at DESC +LIMIT $3 OFFSET $2 +` + +type GetRecentGamesByUsernameParams struct { + Username string + OffsetGames int32 + NumGames int32 +} + +type GetRecentGamesByUsernameRow struct { + GameUuid string + Score int32 + OpponentScore int32 + Won pgtype.Bool + GameEndReason int16 + CreatedAt pgtype.Timestamptz + GameType int16 + OpponentUsername pgtype.Text + Quickdata entity.Quickdata + GameRequest []byte + TournamentData []byte + WinnerIdx pgtype.Int2 +} + +func (q *Queries) GetRecentGamesByUsername(ctx context.Context, arg GetRecentGamesByUsernameParams) ([]GetRecentGamesByUsernameRow, error) { + rows, err := q.db.Query(ctx, getRecentGamesByUsername, arg.Username, arg.OffsetGames, arg.NumGames) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetRecentGamesByUsernameRow + for rows.Next() { + var i GetRecentGamesByUsernameRow + if err := rows.Scan( + &i.GameUuid, + &i.Score, + &i.OpponentScore, + &i.Won, + &i.GameEndReason, + &i.CreatedAt, + &i.GameType, + &i.OpponentUsername, + &i.Quickdata, + &i.GameRequest, + &i.TournamentData, + &i.WinnerIdx, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getRecentGamesByUsernameOld = `-- name: GetRecentGamesByUsernameOld :many +SELECT g.uuid as game_uuid, + CASE WHEN u1.username = 
$1 THEN (g.quickdata->'finalScores'->>0)::int + ELSE (g.quickdata->'finalScores'->>1)::int END as score, + CASE WHEN u1.username = $1 THEN (g.quickdata->'finalScores'->>1)::int + ELSE (g.quickdata->'finalScores'->>0)::int END as opponent_score, + CASE WHEN g.winner_idx = 0 AND u1.username = $1 THEN true + WHEN g.winner_idx = 1 AND u2.username = $1 THEN true + WHEN g.winner_idx = -1 THEN NULL + ELSE false END as won, + g.game_end_reason, + g.created_at, + g.type as game_type, + CASE WHEN u1.username = $1 THEN u2.username + ELSE u1.username END as opponent_username, + g.quickdata, + g.request as game_request, + g.winner_idx +FROM games g +LEFT JOIN users u1 ON g.player0_id = u1.id +LEFT JOIN users u2 ON g.player1_id = u2.id +WHERE (LOWER(u1.username) = LOWER($1) OR LOWER(u2.username) = LOWER($1)) + AND g.game_end_reason > 0 -- only ended games +ORDER BY g.created_at DESC +LIMIT $3 OFFSET $2 +` + +type GetRecentGamesByUsernameOldParams struct { + Username pgtype.Text + OffsetGames int32 + NumGames int32 +} + +type GetRecentGamesByUsernameOldRow struct { + GameUuid pgtype.Text + Score int32 + OpponentScore int32 + Won bool + GameEndReason pgtype.Int4 + CreatedAt pgtype.Timestamptz + GameType pgtype.Int4 + OpponentUsername interface{} + Quickdata entity.Quickdata + GameRequest []byte + WinnerIdx pgtype.Int4 +} + +// Backward-compatible query that reads from games table +func (q *Queries) GetRecentGamesByUsernameOld(ctx context.Context, arg GetRecentGamesByUsernameOldParams) ([]GetRecentGamesByUsernameOldRow, error) { + rows, err := q.db.Query(ctx, getRecentGamesByUsernameOld, arg.Username, arg.OffsetGames, arg.NumGames) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetRecentGamesByUsernameOldRow + for rows.Next() { + var i GetRecentGamesByUsernameOldRow + if err := rows.Scan( + &i.GameUuid, + &i.Score, + &i.OpponentScore, + &i.Won, + &i.GameEndReason, + &i.CreatedAt, + &i.GameType, + &i.OpponentUsername, + &i.Quickdata, + &i.GameRequest, + 
&i.WinnerIdx, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getRecentTourneyGames = `-- name: GetRecentTourneyGames :many +SELECT pg.gid, pg.quickdata, gm.game_request, pg.winner_idx, pg.game_end_reason, + pg.created_at, pg.type, gm.tournament_data +FROM past_games pg +JOIN game_metadata gm ON gm.game_uuid = pg.gid +WHERE gm.tournament_data->>'Id' = $1::text +ORDER BY pg.created_at DESC +LIMIT $3 OFFSET $2 +` + +type GetRecentTourneyGamesParams struct { + TourneyID string + OffsetGames int32 + NumGames int32 +} + +type GetRecentTourneyGamesRow struct { + Gid string + Quickdata entity.Quickdata + GameRequest []byte + WinnerIdx pgtype.Int2 + GameEndReason int16 + CreatedAt pgtype.Timestamptz + Type int16 + TournamentData []byte +} + +func (q *Queries) GetRecentTourneyGames(ctx context.Context, arg GetRecentTourneyGamesParams) ([]GetRecentTourneyGamesRow, error) { + rows, err := q.db.Query(ctx, getRecentTourneyGames, arg.TourneyID, arg.OffsetGames, arg.NumGames) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetRecentTourneyGamesRow + for rows.Next() { + var i GetRecentTourneyGamesRow + if err := rows.Scan( + &i.Gid, + &i.Quickdata, + &i.GameRequest, + &i.WinnerIdx, + &i.GameEndReason, + &i.CreatedAt, + &i.Type, + &i.TournamentData, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getRecentTourneyGamesOld = `-- name: GetRecentTourneyGamesOld :many +SELECT g.uuid as gid, g.quickdata, g.request as game_request, g.winner_idx, g.game_end_reason, + g.created_at, g.type, g.tournament_data +FROM games g +WHERE g.tournament_id = $1::text + AND g.game_end_reason > 0 -- only ended games +ORDER BY g.created_at DESC +LIMIT $3 OFFSET $2 +` + +type GetRecentTourneyGamesOldParams struct { + TourneyID string + OffsetGames int32 + 
NumGames int32 +} + +type GetRecentTourneyGamesOldRow struct { + Gid pgtype.Text + Quickdata entity.Quickdata + GameRequest []byte + WinnerIdx pgtype.Int4 + GameEndReason pgtype.Int4 + CreatedAt pgtype.Timestamptz + Type pgtype.Int4 + TournamentData entity.TournamentData +} + +// Backward-compatible query that reads from games table +func (q *Queries) GetRecentTourneyGamesOld(ctx context.Context, arg GetRecentTourneyGamesOldParams) ([]GetRecentTourneyGamesOldRow, error) { + rows, err := q.db.Query(ctx, getRecentTourneyGamesOld, arg.TourneyID, arg.OffsetGames, arg.NumGames) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetRecentTourneyGamesOldRow + for rows.Next() { + var i GetRecentTourneyGamesOldRow + if err := rows.Scan( + &i.Gid, + &i.Quickdata, + &i.GameRequest, + &i.WinnerIdx, + &i.GameEndReason, + &i.CreatedAt, + &i.Type, + &i.TournamentData, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getRematchStreak = `-- name: GetRematchStreak :many +SELECT DISTINCT game_uuid as gid, + CASE WHEN won = true THEN player_index + WHEN won = false THEN (1 - player_index) + ELSE -1 END as winner_idx, + created_at +FROM game_players +WHERE original_request_id = $1::text + AND game_end_reason <> 5 -- no aborted games + -- note that cancelled games aren't saved in this table + -- and neither are ongoing games. 
+ORDER BY created_at DESC +` + +type GetRematchStreakRow struct { + Gid string + WinnerIdx int32 + CreatedAt pgtype.Timestamptz +} + +func (q *Queries) GetRematchStreak(ctx context.Context, origReqID string) ([]GetRematchStreakRow, error) { + rows, err := q.db.Query(ctx, getRematchStreak, origReqID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetRematchStreakRow + for rows.Next() { + var i GetRematchStreakRow + if err := rows.Scan(&i.Gid, &i.WinnerIdx, &i.CreatedAt); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getRematchStreakOld = `-- name: GetRematchStreakOld :many +SELECT DISTINCT uuid as gid, + winner_idx, + created_at +FROM games +WHERE quickdata->>'o' = $1::text + AND game_end_reason <> 5 -- no aborted games + AND game_end_reason <> 3 -- no cancelled games + AND game_end_reason > 0 -- only ended games +ORDER BY created_at DESC +` + +type GetRematchStreakOldRow struct { + Gid pgtype.Text + WinnerIdx pgtype.Int4 + CreatedAt pgtype.Timestamptz +} + +// Backward-compatible query that reads from games table instead of game_players +func (q *Queries) GetRematchStreakOld(ctx context.Context, origReqID string) ([]GetRematchStreakOldRow, error) { + rows, err := q.db.Query(ctx, getRematchStreakOld, origReqID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetRematchStreakOldRow + for rows.Next() { + var i GetRematchStreakOldRow + if err := rows.Scan(&i.Gid, &i.WinnerIdx, &i.CreatedAt); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const insertGameMetadata = `-- name: InsertGameMetadata :exec +INSERT INTO game_metadata ( + game_uuid, created_at, game_request, tournament_data +) VALUES ( + $1, $2, $3, $4 +) +` + +type InsertGameMetadataParams struct { + GameUuid string + CreatedAt pgtype.Timestamptz 
+ GameRequest []byte + TournamentData []byte +} + +func (q *Queries) InsertGameMetadata(ctx context.Context, arg InsertGameMetadataParams) error { + _, err := q.db.Exec(ctx, insertGameMetadata, + arg.GameUuid, + arg.CreatedAt, + arg.GameRequest, + arg.TournamentData, + ) + return err +} + +const insertGamePlayer = `-- name: InsertGamePlayer :exec +INSERT INTO game_players ( + game_uuid, player_id, player_index, score, won, game_end_reason, + rating_before, rating_after, rating_delta, created_at, game_type, + opponent_id, opponent_score, original_request_id +) VALUES ( + $1, $2, $3, $4, $5, $6, + $7, $8, $9, $10, $11, + $12, $13, $14 +) +` + +type InsertGamePlayerParams struct { + GameUuid string + PlayerID int32 + PlayerIndex int16 + Score int32 + Won pgtype.Bool + GameEndReason int16 + RatingBefore pgtype.Int4 + RatingAfter pgtype.Int4 + RatingDelta pgtype.Int4 + CreatedAt pgtype.Timestamptz + GameType int16 + OpponentID int32 + OpponentScore int32 + OriginalRequestID pgtype.Text +} + +func (q *Queries) InsertGamePlayer(ctx context.Context, arg InsertGamePlayerParams) error { + _, err := q.db.Exec(ctx, insertGamePlayer, + arg.GameUuid, + arg.PlayerID, + arg.PlayerIndex, + arg.Score, + arg.Won, + arg.GameEndReason, + arg.RatingBefore, + arg.RatingAfter, + arg.RatingDelta, + arg.CreatedAt, + arg.GameType, + arg.OpponentID, + arg.OpponentScore, + arg.OriginalRequestID, + ) + return err +} + +const insertPastGame = `-- name: InsertPastGame :exec +INSERT INTO past_games ( + gid, created_at, game_end_reason, winner_idx, + game_document, stats, quickdata, type +) VALUES ( + $1, $2, $3, $4, + $5, $6, $7, $8 +) +` + +type InsertPastGameParams struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameDocument []byte + Stats entity.Stats + Quickdata entity.Quickdata + Type int16 +} + +func (q *Queries) InsertPastGame(ctx context.Context, arg InsertPastGameParams) error { + _, err := q.db.Exec(ctx, insertPastGame, + arg.Gid, + 
arg.CreatedAt, + arg.GameEndReason, + arg.WinnerIdx, + arg.GameDocument, + arg.Stats, + arg.Quickdata, + arg.Type, + ) + return err +} + +const listActiveGames = `-- name: ListActiveGames :many +SELECT quickdata, request, uuid, started, tournament_data +FROM games +WHERE game_end_reason = 0 +` + +type ListActiveGamesRow struct { + Quickdata entity.Quickdata + Request []byte + Uuid pgtype.Text + Started pgtype.Bool + TournamentData entity.TournamentData +} + +func (q *Queries) ListActiveGames(ctx context.Context) ([]ListActiveGamesRow, error) { + rows, err := q.db.Query(ctx, listActiveGames) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListActiveGamesRow + for rows.Next() { + var i ListActiveGamesRow + if err := rows.Scan( + &i.Quickdata, + &i.Request, + &i.Uuid, + &i.Started, + &i.TournamentData, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listActiveTournamentGames = `-- name: ListActiveTournamentGames :many +SELECT quickdata, request, uuid, started, tournament_data +FROM games +WHERE game_end_reason = 0 +AND tournament_id = $1 +` + +type ListActiveTournamentGamesRow struct { + Quickdata entity.Quickdata + Request []byte + Uuid pgtype.Text + Started pgtype.Bool + TournamentData entity.TournamentData +} + +func (q *Queries) ListActiveTournamentGames(ctx context.Context, tournamentID pgtype.Text) ([]ListActiveTournamentGamesRow, error) { + rows, err := q.db.Query(ctx, listActiveTournamentGames, tournamentID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListActiveTournamentGamesRow + for rows.Next() { + var i ListActiveTournamentGamesRow + if err := rows.Scan( + &i.Quickdata, + &i.Request, + &i.Uuid, + &i.Started, + &i.TournamentData, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const 
listAllIDs = `-- name: ListAllIDs :many +SELECT uuid FROM games +ORDER BY created_at ASC +` + +func (q *Queries) ListAllIDs(ctx context.Context) ([]pgtype.Text, error) { + rows, err := q.db.Query(ctx, listAllIDs) + if err != nil { + return nil, err + } + defer rows.Close() + var items []pgtype.Text + for rows.Next() { + var uuid pgtype.Text + if err := rows.Scan(&uuid); err != nil { + return nil, err + } + items = append(items, uuid) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const setReady = `-- name: SetReady :one +UPDATE games SET ready_flag = ready_flag | (1 << $1::integer) +WHERE uuid = $2 +RETURNING ready_flag +` + +type SetReadyParams struct { + PlayerIdx int32 + Uuid pgtype.Text +} + +func (q *Queries) SetReady(ctx context.Context, arg SetReadyParams) (pgtype.Int8, error) { + row := q.db.QueryRow(ctx, setReady, arg.PlayerIdx, arg.Uuid) + var ready_flag pgtype.Int8 + err := row.Scan(&ready_flag) + return ready_flag, err +} + +const updateGame = `-- name: UpdateGame :exec +UPDATE games +SET updated_at = $1, + player0_id = $2, + player1_id = $3, + timers = $4, + started = $5, + game_end_reason = $6, + winner_idx = $7, + loser_idx = $8, + request = $9, + history = $10, + stats = $11, + quickdata = $12, + tournament_data = $13, + tournament_id = $14, + ready_flag = $15, + meta_events = $16 +WHERE uuid = $17 +` + +type UpdateGameParams struct { + UpdatedAt pgtype.Timestamptz + Player0ID pgtype.Int4 + Player1ID pgtype.Int4 + Timers entity.Timers + Started pgtype.Bool + GameEndReason pgtype.Int4 + WinnerIdx pgtype.Int4 + LoserIdx pgtype.Int4 + Request []byte + History []byte + Stats entity.Stats + Quickdata entity.Quickdata + TournamentData entity.TournamentData + TournamentID pgtype.Text + ReadyFlag pgtype.Int8 + MetaEvents entity.MetaEventData + Uuid pgtype.Text +} + +func (q *Queries) UpdateGame(ctx context.Context, arg UpdateGameParams) error { + _, err := q.db.Exec(ctx, updateGame, + arg.UpdatedAt, + arg.Player0ID, 
+ arg.Player1ID, + arg.Timers, + arg.Started, + arg.GameEndReason, + arg.WinnerIdx, + arg.LoserIdx, + arg.Request, + arg.History, + arg.Stats, + arg.Quickdata, + arg.TournamentData, + arg.TournamentID, + arg.ReadyFlag, + arg.MetaEvents, + arg.Uuid, + ) + return err +} + +const updateGameMigrationStatus = `-- name: UpdateGameMigrationStatus :exec +UPDATE games +SET migration_status = $1, + updated_at = NOW() +WHERE uuid = $2 +` + +type UpdateGameMigrationStatusParams struct { + MigrationStatus pgtype.Int2 + Uuid pgtype.Text +} + +func (q *Queries) UpdateGameMigrationStatus(ctx context.Context, arg UpdateGameMigrationStatusParams) error { + _, err := q.db.Exec(ctx, updateGameMigrationStatus, arg.MigrationStatus, arg.Uuid) + return err +} diff --git a/pkg/stores/models/models.go b/pkg/stores/models/models.go index 6eae9cd8a..2866762d7 100644 --- a/pkg/stores/models/models.go +++ b/pkg/stores/models/models.go @@ -67,29 +67,30 @@ type Following struct { } type Game struct { - ID int32 - CreatedAt pgtype.Timestamptz - UpdatedAt pgtype.Timestamptz - DeletedAt pgtype.Timestamptz - Uuid pgtype.Text - Player0ID pgtype.Int4 - Player1ID pgtype.Int4 - Timers entity.Timers - Started pgtype.Bool - GameEndReason pgtype.Int4 - WinnerIdx pgtype.Int4 - LoserIdx pgtype.Int4 - Request entity.GameRequest - History []byte - Stats entity.Stats - Quickdata entity.Quickdata - TournamentData entity.TournamentData - TournamentID pgtype.Text - ReadyFlag pgtype.Int8 - MetaEvents entity.MetaEventData - Type pgtype.Int4 - GameRequest entity.GameRequest - HistoryInS3 bool + ID int32 + CreatedAt pgtype.Timestamptz + UpdatedAt pgtype.Timestamptz + DeletedAt pgtype.Timestamptz + Uuid pgtype.Text + Player0ID pgtype.Int4 + Player1ID pgtype.Int4 + Timers entity.Timers + Started pgtype.Bool + GameEndReason pgtype.Int4 + WinnerIdx pgtype.Int4 + LoserIdx pgtype.Int4 + Request []byte + History []byte + Stats entity.Stats + Quickdata entity.Quickdata + TournamentData entity.TournamentData + TournamentID 
pgtype.Text + ReadyFlag pgtype.Int8 + MetaEvents entity.MetaEventData + Type pgtype.Int4 + GameRequest []byte + HistoryInS3 bool + MigrationStatus pgtype.Int2 } type GameComment struct { @@ -107,10 +108,29 @@ type GameDocument struct { Document []byte } +type GameMetadatum struct { + GameUuid string + CreatedAt pgtype.Timestamptz + GameRequest []byte + TournamentData []byte + CreatedAtIdx pgtype.Timestamptz +} + type GamePlayer struct { - GameID int32 - PlayerID int32 - PlayerIndex pgtype.Int2 + GameUuid string + PlayerID int32 + PlayerIndex int16 + Score int32 + Won pgtype.Bool + GameEndReason int16 + RatingBefore pgtype.Int4 + RatingAfter pgtype.Int4 + RatingDelta pgtype.Int4 + CreatedAt pgtype.Timestamptz + GameType int16 + OpponentID int32 + OpponentScore int32 + OriginalRequestID pgtype.Text } type Integration struct { @@ -144,6 +164,940 @@ type Notoriousgame struct { Timestamp pgtype.Int8 } +type PastGame struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameDocument []byte + Stats entity.Stats + Quickdata entity.Quickdata + Type int16 +} + +type PastGames202008 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202009 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202010 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202011 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + 
Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202012 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202101 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202102 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202103 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202104 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202105 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202106 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202107 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type 
PastGames202108 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202109 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202110 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202111 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202112 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202201 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202202 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202203 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202204 struct { + Gid string + CreatedAt pgtype.Timestamptz + 
GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202205 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202206 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202207 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202208 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202209 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202210 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202211 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202212 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument 
[]byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202301 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202302 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202303 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202304 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202305 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202306 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202307 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202308 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte 
+} + +type PastGames202309 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202310 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202311 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202312 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202401 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202402 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202403 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202404 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202405 struct { + Gid string + CreatedAt pgtype.Timestamptz 
+ GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202406 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202407 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202408 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202409 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202410 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202411 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202412 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202501 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + 
GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202502 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202503 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202504 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202505 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202506 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202507 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202508 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202509 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + 
TournamentData []byte +} + +type PastGames202510 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202511 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202512 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202601 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202602 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202603 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202604 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202605 struct { + Gid string + CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + +type PastGames202606 struct { + Gid string + 
CreatedAt pgtype.Timestamptz + GameEndReason int16 + WinnerIdx pgtype.Int2 + GameRequest []byte + GameDocument []byte + Stats []byte + Quickdata []byte + Type int16 + TournamentData []byte +} + type Permission struct { ID int32 Code string diff --git a/pkg/stores/models/puzzles.sql.go b/pkg/stores/models/puzzles.sql.go index fea1fabb9..a38ebf419 100644 --- a/pkg/stores/models/puzzles.sql.go +++ b/pkg/stores/models/puzzles.sql.go @@ -9,24 +9,24 @@ import ( "context" "github.com/jackc/pgx/v5/pgtype" - "github.com/woogles-io/liwords/pkg/entity" ) const getPotentialPuzzleGames = `-- name: GetPotentialPuzzleGames :many -SELECT games.uuid FROM games -LEFT JOIN puzzles on puzzles.game_id = games.id +SELECT past_games.gid FROM past_games +JOIN games ON past_games.gid = games.uuid +JOIN game_metadata ON game_metadata.game_uuid = past_games.gid +LEFT JOIN puzzles ON puzzles.game_id = games.id WHERE puzzles.id IS NULL - AND games.created_at BETWEEN $1 AND $2 - AND (stats->'d1'->'Challenged Phonies'->'t' = '0') - AND (stats->'d2'->'Challenged Phonies'->'t' = '0') - AND (stats->'d1'->'Unchallenged Phonies'->'t' = '0') - AND (stats->'d2'->'Unchallenged Phonies'->'t' = '0') - AND games.request LIKE $3 -- %lexicon% - AND games.request NOT LIKE '%classic_super%' - AND games.request NOT LIKE '%wordsmog%' + AND past_games.created_at BETWEEN $1 AND $2 + AND (past_games.stats->'d1'->'Challenged Phonies'->'t' = '0') + AND (past_games.stats->'d2'->'Challenged Phonies'->'t' = '0') + AND (past_games.stats->'d1'->'Unchallenged Phonies'->'t' = '0') + AND (past_games.stats->'d2'->'Unchallenged Phonies'->'t' = '0') + AND game_metadata.game_request->>'lexicon' = $3::text + AND game_metadata.game_request->'rules'->>'variantName' = 'classic' -- 0: none, 5: aborted, 7: canceled - AND game_end_reason not in (0, 5, 7) - AND type = 0 + AND past_games.game_end_reason NOT IN (0, 5, 7) + AND past_games.type = 0 ORDER BY games.id DESC LIMIT $4 OFFSET $5 @@ -35,16 +35,16 @@ WHERE puzzles.id IS NULL type 
GetPotentialPuzzleGamesParams struct { CreatedAt pgtype.Timestamptz CreatedAt_2 pgtype.Timestamptz - Request entity.GameRequest + Column3 string Limit int32 Offset int32 } -func (q *Queries) GetPotentialPuzzleGames(ctx context.Context, arg GetPotentialPuzzleGamesParams) ([]pgtype.Text, error) { +func (q *Queries) GetPotentialPuzzleGames(ctx context.Context, arg GetPotentialPuzzleGamesParams) ([]string, error) { rows, err := q.db.Query(ctx, getPotentialPuzzleGames, arg.CreatedAt, arg.CreatedAt_2, - arg.Request, + arg.Column3, arg.Limit, arg.Offset, ) @@ -52,13 +52,13 @@ func (q *Queries) GetPotentialPuzzleGames(ctx context.Context, arg GetPotentialP return nil, err } defer rows.Close() - var items []pgtype.Text + var items []string for rows.Next() { - var uuid pgtype.Text - if err := rows.Scan(&uuid); err != nil { + var gid string + if err := rows.Scan(&gid); err != nil { return nil, err } - items = append(items, uuid) + items = append(items, gid) } if err := rows.Err(); err != nil { return nil, err @@ -68,21 +68,22 @@ func (q *Queries) GetPotentialPuzzleGames(ctx context.Context, arg GetPotentialP const getPotentialPuzzleGamesAvoidBots = `-- name: GetPotentialPuzzleGamesAvoidBots :many -SELECT games.uuid FROM games -LEFT JOIN puzzles on puzzles.game_id = games.id +SELECT past_games.gid FROM past_games +JOIN games ON past_games.gid = games.uuid +JOIN game_metadata ON game_metadata.game_uuid = past_games.gid +LEFT JOIN puzzles ON puzzles.game_id = games.id WHERE puzzles.id IS NULL - AND games.created_at BETWEEN $1 AND $2 - AND (stats->'d1'->'Challenged Phonies'->'t' = '0') - AND (stats->'d2'->'Challenged Phonies'->'t' = '0') - AND (stats->'d1'->'Unchallenged Phonies'->'t' = '0') - AND (stats->'d2'->'Unchallenged Phonies'->'t' = '0') - AND games.request LIKE $3 -- %lexicon% - AND games.request NOT LIKE '%classic_super%' - AND games.request NOT LIKE '%wordsmog%' + AND past_games.created_at BETWEEN $1 AND $2 + AND (past_games.stats->'d1'->'Challenged Phonies'->'t' = 
'0') + AND (past_games.stats->'d2'->'Challenged Phonies'->'t' = '0') + AND (past_games.stats->'d1'->'Unchallenged Phonies'->'t' = '0') + AND (past_games.stats->'d2'->'Unchallenged Phonies'->'t' = '0') + AND game_metadata.game_request->>'lexicon' = $3::text + AND game_metadata.game_request->'rules'->>'variantName' = 'classic' -- 0: none, 5: aborted, 7: canceled - AND game_end_reason not in (0, 5, 7) - AND NOT (quickdata @> '{"pi": [{"is_bot": true}]}'::jsonb) - AND type = 0 + AND past_games.game_end_reason NOT IN (0, 5, 7) + AND NOT (past_games.quickdata @> '{"pi": [{"is_bot": true}]}'::jsonb) + AND past_games.type = 0 ORDER BY games.id DESC LIMIT $4 OFFSET $5 @@ -91,17 +92,17 @@ WHERE puzzles.id IS NULL type GetPotentialPuzzleGamesAvoidBotsParams struct { CreatedAt pgtype.Timestamptz CreatedAt_2 pgtype.Timestamptz - Request entity.GameRequest + Column3 string Limit int32 Offset int32 } // puzzle generation -func (q *Queries) GetPotentialPuzzleGamesAvoidBots(ctx context.Context, arg GetPotentialPuzzleGamesAvoidBotsParams) ([]pgtype.Text, error) { +func (q *Queries) GetPotentialPuzzleGamesAvoidBots(ctx context.Context, arg GetPotentialPuzzleGamesAvoidBotsParams) ([]string, error) { rows, err := q.db.Query(ctx, getPotentialPuzzleGamesAvoidBots, arg.CreatedAt, arg.CreatedAt_2, - arg.Request, + arg.Column3, arg.Limit, arg.Offset, ) @@ -109,13 +110,13 @@ func (q *Queries) GetPotentialPuzzleGamesAvoidBots(ctx context.Context, arg GetP return nil, err } defer rows.Close() - var items []pgtype.Text + var items []string for rows.Next() { - var uuid pgtype.Text - if err := rows.Scan(&uuid); err != nil { + var gid string + if err := rows.Scan(&gid); err != nil { return nil, err } - items = append(items, uuid) + items = append(items, gid) } if err := rows.Err(); err != nil { return nil, err diff --git a/pkg/stores/puzzles/db.go b/pkg/stores/puzzles/db.go index bac3a512c..4c666aa67 100644 --- a/pkg/stores/puzzles/db.go +++ b/pkg/stores/puzzles/db.go @@ -6,7 +6,6 @@ import ( 
"fmt" "time" - macondogame "github.com/domino14/macondo/game" "github.com/jackc/pgx/v5" "github.com/jackc/pgx/v5/pgtype" "github.com/jackc/pgx/v5/pgxpool" @@ -753,28 +752,30 @@ func (s *DBStore) GetPotentialPuzzleGames(ctx context.Context, time1, time2 time ids, err := s.queries.GetPotentialPuzzleGamesAvoidBots(ctx, models.GetPotentialPuzzleGamesAvoidBotsParams{ CreatedAt: pgtype.Timestamptz{Valid: true, Time: time1}, CreatedAt_2: pgtype.Timestamptz{Valid: true, Time: time2}, - Request: entity.GameRequest{GameRequest: &ipc.GameRequest{ - Lexicon: lexicon, - Rules: &ipc.GameRules{ - VariantName: string(macondogame.VarClassic), - }}}, - Limit: int32(limit), - Offset: 0, + Column3: lexicon, + Limit: int32(limit), + Offset: 0, }) - return ids, err + // Convert []string to []pgtype.Text for backwards compatibility + result := make([]pgtype.Text, len(ids)) + for i, id := range ids { + result[i] = pgtype.Text{String: id, Valid: true} + } + return result, err } ids, err := s.queries.GetPotentialPuzzleGames(ctx, models.GetPotentialPuzzleGamesParams{ CreatedAt: pgtype.Timestamptz{Valid: true, Time: time1}, CreatedAt_2: pgtype.Timestamptz{Valid: true, Time: time2}, - Request: entity.GameRequest{GameRequest: &ipc.GameRequest{ - Lexicon: lexicon, - Rules: &ipc.GameRules{ - VariantName: string(macondogame.VarClassic), - }}}, - Limit: int32(limit), - Offset: 0, + Column3: lexicon, + Limit: int32(limit), + Offset: 0, }) - return ids, err + // Convert []string to []pgtype.Text for backwards compatibility + result := make([]pgtype.Text, len(ids)) + for i, id := range ids { + result[i] = pgtype.Text{String: id, Valid: true} + } + return result, err } func getUserRating(ctx context.Context, tx pgx.Tx, userID string, ratingKey entity.VariantKey) (*entity.SingleRating, error) { diff --git a/scripts/migrations/historical_games/README.md b/scripts/migrations/historical_games/README.md new file mode 100644 index 000000000..29d3a51da --- /dev/null +++ 
b/scripts/migrations/historical_games/README.md @@ -0,0 +1,103 @@
+# Historical Games Migration
+
+This directory contains tools for migrating completed games from the main `games` table to the new partitioned `past_games` and `game_players` tables.
+
+## Overview
+
+The migration process moves completed games (games with `game_end_reason != 0`) from the `games` table to:
+- `past_games`: Stores game metadata and game documents (JSONB format for efficient querying)
+- `game_players`: Stores denormalized player data for fast recent games queries
+
+## Files
+
+- `main.go`: Main migration tool written in Go
+- `run_historical_migration.sh`: Shell script wrapper for easy execution
+- `README.md`: This documentation
+
+## Usage
+
+### Quick Start
+
+```bash
+# Run a dry run first to see what would be migrated
+./run_historical_migration.sh
+
+# Run the actual migration
+DRY_RUN=false ./run_historical_migration.sh
+```
+
+### Environment Variables
+
+- `CONFIG_FILE`: Path to liwords config file (default: uses default config)
+- `BATCH_SIZE`: Number of games to process in each batch (default: 100)
+- `START_OFFSET`: Starting offset for processing (default: 0)
+- `LIMIT`: Maximum number of games to process, 0 = no limit (default: 0)
+- `DRY_RUN`: Set to "false" to actually perform migration (default: true)
+- `VERBOSE`: Set to "true" for verbose logging (default: false)
+
+### Examples
+
+```bash
+# Use a specific config file
+CONFIG_FILE=/path/to/config.json DRY_RUN=false ./run_historical_migration.sh
+
+# Migrate first 1000 games only
+LIMIT=1000 DRY_RUN=false ./run_historical_migration.sh
+
+# Resume migration from offset 5000
+START_OFFSET=5000 DRY_RUN=false ./run_historical_migration.sh
+
+# Use larger batches for faster processing
+BATCH_SIZE=500 DRY_RUN=false ./run_historical_migration.sh
+
+# Verbose logging
+VERBOSE=true DRY_RUN=false ./run_historical_migration.sh
+```
+
+> **Note (review):** Resuming with a nonzero `START_OFFSET` is only predictable in dry-run mode.
+> In a real run, games drop out of the selection query as soon as their `migration_status` is
+> updated, so row offsets shift between (and within) runs and a nonzero offset can skip
+> unmigrated games. Prefer re-running from offset 0 until the tool reports no games remaining,
+> and verify with the monitoring queries.
+
+## Safety Features
+
+1.
**Dry Run by Default**: The script runs in dry-run mode by default to prevent accidental data migration +2. **Batch Processing**: Processes games in configurable batches to avoid overwhelming the database +3. **Transaction Safety**: Each game migration is wrapped in a transaction +4. **Progress Reporting**: Shows progress every 100 games +5. **Error Handling**: Continues processing even if individual games fail, reporting errors at the end + +## Migration Process + +For each completed game, the tool: + +1. **Reads** the game data from the `games` table +2. **Converts** the protobuf game history to a GameDocument (JSONB format) +3. **Inserts** the game data into `past_games` table +4. **Inserts** player records into `game_players` table (one record per player) +5. **Updates** the `migration_status` field in the original `games` table to mark it as migrated + +## Post-Migration Cleanup + +The migration tool includes commented code to clear migrated data from the `games` table to save space. Uncomment the cleanup section in `main.go` if you want to remove the original data after migration. 
+
+## Monitoring
+
+- Check migration progress in the logs
+- Query migration status: `SELECT migration_status, COUNT(*) FROM games GROUP BY migration_status;`
+- Verify migrated data: `SELECT COUNT(*) FROM past_games; SELECT COUNT(*) FROM game_players;`
+
+## Rollback
+
+If you need to roll back the migration:
+
+> **Note (review):** A rollback is only safe while the original rows in `games` still hold their
+> data. The optional cleanup step sets `migration_status = 2` and clears the source columns, after
+> which the statements below would delete the only remaining copy of those games.
+
+```sql
+-- Remove migrated data
+DELETE FROM past_games WHERE gid IN (SELECT uuid FROM games WHERE migration_status >= 1);
+DELETE FROM game_players WHERE game_uuid IN (SELECT uuid FROM games WHERE migration_status >= 1);
+
+-- Reset migration status
+UPDATE games SET migration_status = NULL WHERE migration_status >= 1;
+```
+
+## Performance Considerations
+
+- Run during low-traffic periods
+- Consider increasing `BATCH_SIZE` for faster processing on powerful systems
+- Monitor database performance during migration
+- The tool includes small delays between batches to avoid overloading the database
\ No newline at end of file
diff --git a/scripts/migrations/historical_games/main.go b/scripts/migrations/historical_games/main.go
new file mode 100644
index 000000000..a244153ca
--- /dev/null
+++ b/scripts/migrations/historical_games/main.go
@@ -0,0 +1,442 @@
+package main
+
+import (
+	"context"
+	"database/sql"
+	"encoding/json"
+	"flag"
+	"fmt"
+	"log"
+	"os"
+	"time"
+
+	"github.com/jackc/pgx/v5"
+	"github.com/jackc/pgx/v5/pgxpool"
+	"google.golang.org/protobuf/encoding/protojson"
+	"google.golang.org/protobuf/proto"
+
+	macondogame "github.com/domino14/macondo/game"
+	macondopb "github.com/domino14/macondo/gen/api/proto/macondo"
+	"github.com/woogles-io/liwords/pkg/config"
+	"github.com/woogles-io/liwords/pkg/entity"
+	"github.com/woogles-io/liwords/pkg/entity/utilities"
+	"github.com/woogles-io/liwords/pkg/stores/common"
+	pb "github.com/woogles-io/liwords/rpc/api/proto/ipc"
+)
+
+// GameRequest utility functions for handling both proto and protojson formats
+
+// ParseGameRequest parses GameRequest from bytes, trying proto format first, then protojson
+func ParseGameRequest(data []byte) (*pb.GameRequest, error) { + if len(data) == 0 { + return &pb.GameRequest{}, nil + } + + gr := &pb.GameRequest{} + + // Try proto format first (binary data from live games) + err := proto.Unmarshal(data, gr) + if err == nil { + return gr, nil + } + + // Fall back to protojson format (from past games) + err = protojson.Unmarshal(data, gr) + if err != nil { + return nil, fmt.Errorf("failed to parse GameRequest as both proto and protojson: %w", err) + } + + return gr, nil +} + +// MarshalGameRequestAsJSON marshals GameRequest as protojson for past games table +func MarshalGameRequestAsJSON(gr *pb.GameRequest) ([]byte, error) { + if gr == nil { + return nil, fmt.Errorf("GameRequest is nil") + } + return protojson.Marshal(gr) +} + +type GameRow struct { + UUID string + CreatedAt time.Time + GameEndReason int + WinnerIdx int + GameRequest []byte // Binary proto data from games.request + History []byte + Stats json.RawMessage + Quickdata json.RawMessage + Type int + TournamentData json.RawMessage + Player0ID sql.NullInt32 + Player1ID sql.NullInt32 +} + +func main() { + var ( + configFile = flag.String("config", "", "Config file path") + batchSize = flag.Int("batch", 100, "Batch size for processing games") + startOffset = flag.Int("offset", 0, "Starting offset for processing") + limit = flag.Int("limit", 0, "Limit number of games to process (0 = no limit)") + dryRun = flag.Bool("dry-run", false, "Dry run mode - don't actually migrate") + verbose = flag.Bool("verbose", false, "Verbose logging") + ) + flag.Parse() + + // Load config + cfg := &config.Config{} + if *configFile != "" { + cfg.Load([]string{"-config", *configFile}) + } else { + cfg.Load(os.Args[1:]) + } + + ctx := context.Background() + + // Set up database connection + dbPool, err := common.OpenDB(cfg.DBHost, cfg.DBPort, cfg.DBName, cfg.DBUser, cfg.DBPassword, cfg.DBSSLMode) + if err != nil { + log.Fatalf("Failed to connect to database: %v", err) + } + defer dbPool.Close() + + 
// Get total count of games to migrate + var totalCount int + err = dbPool.QueryRow(ctx, ` + SELECT COUNT(*) + FROM games + WHERE game_end_reason != 0 + AND (migration_status IS NULL OR migration_status = 0) + `).Scan(&totalCount) + if err != nil { + log.Fatalf("Failed to get game count: %v", err) + } + + log.Printf("Found %d games to migrate", totalCount) + if *limit > 0 && *limit < totalCount { + totalCount = *limit + log.Printf("Limiting to %d games", totalCount) + } + + processed := 0 + errors := 0 + offset := *startOffset + + for processed < totalCount { + // Get batch of games + rows, err := dbPool.Query(ctx, ` + SELECT uuid, created_at, game_end_reason, winner_idx, + request, history, stats, quickdata, type, tournament_data, + player0_id, player1_id + FROM games + WHERE game_end_reason != 0 + AND (migration_status IS NULL OR migration_status = 0) + ORDER BY created_at + LIMIT $1 OFFSET $2 + `, *batchSize, offset) + if err != nil { + log.Fatalf("Failed to query games: %v", err) + } + + games := []GameRow{} + for rows.Next() { + var g GameRow + err := rows.Scan(&g.UUID, &g.CreatedAt, &g.GameEndReason, &g.WinnerIdx, + &g.GameRequest, &g.History, &g.Stats, &g.Quickdata, &g.Type, + &g.TournamentData, &g.Player0ID, &g.Player1ID) + if err != nil { + log.Printf("Error scanning row: %v", err) + errors++ + continue + } + games = append(games, g) + } + rows.Close() + + if len(games) == 0 { + break + } + + // Process each game in the batch + for _, game := range games { + if *verbose { + log.Printf("Processing game %s...", game.UUID) + } + + err := migrateGame(ctx, dbPool, cfg, game, *dryRun) + if err != nil { + log.Printf("Error migrating game %s: %v", game.UUID, err) + errors++ + } else { + processed++ + if processed%100 == 0 { + log.Printf("Progress: %d/%d games migrated", processed, totalCount) + } + } + } + + offset += *batchSize + + // Small delay to avoid overloading the database + time.Sleep(100 * time.Millisecond) + } + + log.Printf("Migration complete. 
Processed: %d, Errors: %d", processed, errors) +} + +func migrateGame(ctx context.Context, db *pgxpool.Pool, cfg *config.Config, game GameRow, dryRun bool) error { + // Parse history to get game details + hist := &macondopb.GameHistory{} + err := proto.Unmarshal(game.History, hist) + if err != nil { + return fmt.Errorf("failed to unmarshal history: %v", err) + } + + // Parse game request using utility function + grproto, err := ParseGameRequest(game.GameRequest) + if err != nil { + return fmt.Errorf("failed to parse game request: %v", err) + } + + // Create entity.GameRequest wrapper for compatibility + gameReq := entity.GameRequest{GameRequest: grproto} + + // Marshal as JSON for past_games table + grprotoJson, err := MarshalGameRequestAsJSON(grproto) + if err != nil { + return fmt.Errorf("failed to marshal game request: %v", err) + } + + // Parse other data + var quickdata entity.Quickdata + if err := json.Unmarshal(game.Quickdata, &quickdata); err != nil { + return fmt.Errorf("failed to parse quickdata: %v", err) + } + + var stats entity.Stats + if err := json.Unmarshal(game.Stats, &stats); err != nil { + return fmt.Errorf("failed to parse stats: %v", err) + } + + var tournamentData entity.TournamentData + if len(game.TournamentData) > 0 { + if err := json.Unmarshal(game.TournamentData, &tournamentData); err != nil { + return fmt.Errorf("failed to parse tournament data: %v", err) + } + } + + // Create macondo rules from game request + lexicon := hist.Lexicon + if lexicon == "" { + lexicon = gameReq.GameRequest.Lexicon + } + + rules, err := macondogame.NewBasicGameRules( + cfg.MacondoConfig(), lexicon, gameReq.GameRequest.Rules.BoardLayoutName, + gameReq.GameRequest.Rules.LetterDistributionName, macondogame.CrossScoreOnly, + macondogame.Variant(gameReq.GameRequest.Rules.VariantName)) + if err != nil { + return fmt.Errorf("failed to create game rules: %v", err) + } + + // Create macondo game from history + mcg, err := macondogame.NewFromHistory(hist, rules, 
len(hist.Events)) + if err != nil { + return fmt.Errorf("failed to create game from history: %v", err) + } + + // Create an entity.Game to use the proper conversion function + entGame := &entity.Game{ + Game: *mcg, + GameReq: &gameReq, + Stats: &stats, + Quickdata: &quickdata, + Type: pb.GameType(game.Type), + TournamentData: &tournamentData, + PlayerDBIDs: [2]uint{uint(game.Player0ID.Int32), uint(game.Player1ID.Int32)}, + CreatedAt: game.CreatedAt, + GameEndReason: pb.GameEndReason(game.GameEndReason), + WinnerIdx: game.WinnerIdx, + } + + // Convert to GameDocument using the proper utility function + doc, err := utilities.ToGameDocument(entGame, cfg) + if err != nil { + return fmt.Errorf("failed to convert to game document: %v", err) + } + + docBytes, err := protojson.Marshal(doc) + if err != nil { + return fmt.Errorf("failed to marshal game document: %v", err) + } + + // Get final scores + finalScores := quickdata.FinalScores + if len(finalScores) == 0 && len(hist.FinalScores) > 0 { + finalScores = hist.FinalScores + } + + if dryRun { + log.Printf("DRY RUN: Would migrate game %s with scores %v", game.UUID, finalScores) + return nil + } + + // Start transaction + tx, err := db.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + return fmt.Errorf("failed to start transaction: %v", err) + } + defer tx.Rollback(ctx) + + // Insert into game_metadata first + _, err = tx.Exec(ctx, ` + INSERT INTO game_metadata ( + game_uuid, created_at, game_request, tournament_data + ) VALUES ($1, $2, $3, $4) + `, game.UUID, game.CreatedAt, grprotoJson, game.TournamentData) + if err != nil { + return fmt.Errorf("failed to insert into game_metadata: %v", err) + } + + // Insert into past_games (without game_request and tournament_data) + _, err = tx.Exec(ctx, ` + INSERT INTO past_games ( + gid, created_at, game_end_reason, winner_idx, + game_document, stats, quickdata, type + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + `, game.UUID, game.CreatedAt, game.GameEndReason, game.WinnerIdx, + 
docBytes, game.Stats, game.Quickdata, game.Type) + if err != nil { + return fmt.Errorf("failed to insert into past_games: %v", err) + } + + // Insert into game_players for each player + if game.Player0ID.Valid && game.Player1ID.Valid { + // Extract common data for both players + originalRequestID := extractOriginalRequestID(game.Quickdata) + + // Player 0 + ratingBefore0, ratingAfter0, ratingDelta0 := extractRatingData(game.Quickdata, 0) + _, err = tx.Exec(ctx, ` + INSERT INTO game_players ( + game_uuid, player_id, player_index, score, won, game_end_reason, + rating_before, rating_after, rating_delta, created_at, game_type, + opponent_id, opponent_score, original_request_id + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14) + `, game.UUID, game.Player0ID.Int32, 0, + getScore(finalScores, 0), getWon(game.WinnerIdx, 0), game.GameEndReason, + ratingBefore0, ratingAfter0, ratingDelta0, game.CreatedAt, game.Type, + game.Player1ID.Int32, getScore(finalScores, 1), originalRequestID) + if err != nil { + return fmt.Errorf("failed to insert player 0 into game_players: %v", err) + } + + // Player 1 + ratingBefore1, ratingAfter1, ratingDelta1 := extractRatingData(game.Quickdata, 1) + _, err = tx.Exec(ctx, ` + INSERT INTO game_players ( + game_uuid, player_id, player_index, score, won, game_end_reason, + rating_before, rating_after, rating_delta, created_at, game_type, + opponent_id, opponent_score, original_request_id + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14) + `, game.UUID, game.Player1ID.Int32, 1, + getScore(finalScores, 1), getWon(game.WinnerIdx, 1), game.GameEndReason, + ratingBefore1, ratingAfter1, ratingDelta1, game.CreatedAt, game.Type, + game.Player0ID.Int32, getScore(finalScores, 0), originalRequestID) + if err != nil { + return fmt.Errorf("failed to insert player 1 into game_players: %v", err) + } + } + + // Update migration status + _, err = tx.Exec(ctx, ` + UPDATE games + SET migration_status = 1, updated_at = 
NOW() + WHERE uuid = $1 + `, game.UUID) + if err != nil { + return fmt.Errorf("failed to update migration status: %v", err) + } + + // Optionally clear the data from games table to save space + // Uncomment this if you want to clear data after migration + /* + _, err = tx.Exec(ctx, ` + UPDATE games + SET history = NULL, + stats = NULL, + quickdata = NULL, + timers = NULL, + meta_events = NULL, + request = NULL, + tournament_data = NULL, + player0_id = NULL, + player1_id = NULL, + migration_status = 2, + updated_at = NOW() + WHERE uuid = $1 + `, game.UUID) + if err != nil { + return fmt.Errorf("failed to clear game data: %v", err) + } + */ + + return tx.Commit(ctx) +} + +func getScore(scores []int32, playerIdx int) int32 { + if playerIdx < len(scores) { + return scores[playerIdx] + } + return 0 +} + +func getWon(winnerIdx, playerIdx int) sql.NullBool { + if winnerIdx == -1 { + // Tie + return sql.NullBool{Valid: false} + } + return sql.NullBool{Bool: winnerIdx == playerIdx, Valid: true} +} + +// extractRatingData extracts rating information from quickdata +func extractRatingData(quickdataJSON json.RawMessage, playerIdx int) (before, after sql.NullInt32, delta sql.NullInt32) { + var quickdata struct { + OriginalRatings []float64 `json:"OriginalRatings"` + NewRatings []float64 `json:"NewRatings"` + } + + if err := json.Unmarshal(quickdataJSON, &quickdata); err != nil { + return sql.NullInt32{}, sql.NullInt32{}, sql.NullInt32{} + } + + // Check if we have rating data for this player + if playerIdx < len(quickdata.OriginalRatings) && playerIdx < len(quickdata.NewRatings) { + beforeRating := int32(quickdata.OriginalRatings[playerIdx]) + afterRating := int32(quickdata.NewRatings[playerIdx]) + ratingDelta := afterRating - beforeRating + + return sql.NullInt32{Int32: beforeRating, Valid: true}, + sql.NullInt32{Int32: afterRating, Valid: true}, + sql.NullInt32{Int32: ratingDelta, Valid: true} + } + + return sql.NullInt32{}, sql.NullInt32{}, sql.NullInt32{} +} + +// 
extractOriginalRequestID extracts the original request ID from quickdata +func extractOriginalRequestID(quickdataJSON json.RawMessage) sql.NullString { + var quickdata struct { + OriginalRequestID string `json:"o"` + } + + if err := json.Unmarshal(quickdataJSON, &quickdata); err != nil { + return sql.NullString{} + } + + if quickdata.OriginalRequestID != "" { + return sql.NullString{String: quickdata.OriginalRequestID, Valid: true} + } + + return sql.NullString{} +} diff --git a/scripts/migrations/historical_games/run_historical_migration.sh b/scripts/migrations/historical_games/run_historical_migration.sh new file mode 100755 index 000000000..9308814e8 --- /dev/null +++ b/scripts/migrations/historical_games/run_historical_migration.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +# Historical Games Migration Script +# This script migrates completed games from the games table to past_games and game_players tables + +set -e + +# Configuration - use config file approach like other migrations +CONFIG_FILE="${CONFIG_FILE:-}" +BATCH_SIZE="${BATCH_SIZE:-100}" +START_OFFSET="${START_OFFSET:-0}" +LIMIT="${LIMIT:-0}" +DRY_RUN="${DRY_RUN:-true}" +VERBOSE="${VERBOSE:-false}" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +echo -e "${GREEN}Starting Historical Games Migration${NC}" +echo "Config File: ${CONFIG_FILE:-default config}" +echo "Batch Size: $BATCH_SIZE" +echo "Start Offset: $START_OFFSET" +echo "Limit: $LIMIT" +echo "Dry Run: $DRY_RUN" +echo "Verbose: $VERBOSE" +echo "" + +# Build the migration tool +echo -e "${YELLOW}Building migration tool...${NC}" +cd "$(dirname "$0")" +go build -o historical_games main.go + +if [ $? 
-ne 0 ]; then + echo -e "${RED}Failed to build migration tool${NC}" + exit 1 +fi + +# Prepare arguments +ARGS="" +if [ -n "$CONFIG_FILE" ]; then + ARGS="$ARGS -config=\"$CONFIG_FILE\"" +fi +ARGS="$ARGS -batch=$BATCH_SIZE" +ARGS="$ARGS -offset=$START_OFFSET" +ARGS="$ARGS -limit=$LIMIT" +ARGS="$ARGS -dry-run=$DRY_RUN" +ARGS="$ARGS -verbose=$VERBOSE" + +# Run the migration +echo -e "${YELLOW}Running migration...${NC}" +eval "./historical_games $ARGS" + +if [ $? -eq 0 ]; then + echo -e "${GREEN}Migration completed successfully!${NC}" +else + echo -e "${RED}Migration failed!${NC}" + exit 1 +fi + +# Clean up +rm -f historical_games + +echo -e "${GREEN}Done!${NC}" \ No newline at end of file diff --git a/sqlc.yaml b/sqlc.yaml index 1efcd99dd..a10dbeeba 100644 --- a/sqlc.yaml +++ b/sqlc.yaml @@ -18,23 +18,28 @@ sql: go_type: import: "github.com/woogles-io/liwords/pkg/entity" type: "Stats" + - column: "past_games.stats" + go_type: + import: "github.com/woogles-io/liwords/pkg/entity" + type: "Stats" - column: "games.quickdata" go_type: import: "github.com/woogles-io/liwords/pkg/entity" type: "Quickdata" - - column: "games.tournament_data" + - column: "past_games.quickdata" go_type: import: "github.com/woogles-io/liwords/pkg/entity" - type: "TournamentData" - - column: "games.meta_events" + type: "Quickdata" + - column: "games.tournament_data" go_type: import: "github.com/woogles-io/liwords/pkg/entity" - type: "MetaEventData" - - column: "games.request" + type: "TournamentData" + - column: "past_games.tournament_data" go_type: import: "github.com/woogles-io/liwords/pkg/entity" - type: "GameRequest" - - column: "games.game_request" + type: "TournamentData" + pointer: true + - column: "games.meta_events" go_type: import: "github.com/woogles-io/liwords/pkg/entity" - type: "GameRequest" + type: "MetaEventData"